/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree-based front ends and the back end (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing the GCC extern inline
      function extension.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for static
      variables.

    - cgraph_finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve memory
      usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C front end).

    - cgraph_optimize

      In this unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The back end can then use this information
      to modify calling conventions, do better inlining or apply similar
      optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by these
      functions.  There should be little need to call them, and all the
      references should be made explicit to the cgraph code.  At present
      they are used by the C++ front end to explicitly mark the keyed
      methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood by
      generic code into understandable ones or alternatively marking the
      callgraph and varpool nodes referenced by them as needed.

      ??? On tree-ssa, genericizing should take place here and we will avoid
      the need for these hooks (replacing them by a genericizing hook).

   Analysis of all functions is deferred to cgraph_finalize_compilation_unit
   and expansion to cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are analyzed.
   During analysis the call-graph edges from reachable functions are
   constructed and their destinations are marked as reachable.  References
   to functions and variables are discovered too, and variables found to be
   needed are output to the assembly file.  Via the mark_referenced call in
   assemble_variable, functions referenced by static variables are noticed
   too.

   The intraprocedural information is produced and its existence indicated
   by global_info_ready.  Once this flag is set it is impossible to change a
   function from !reachable to reachable, and thus assemble_variable no
   longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable
   functions that have not been completely inlined or are not external are
   output.

   ??? It is possible that a reference to a function or variable is
   optimized out.  We cannot deal with this nicely because the topological
   order is not suitable for it.  For tree-ssa we may consider another pass
   doing optimization and re-discovering reachable functions.

   ??? Reorganize the code so variables are output very last and only if
   they really have been referenced by the produced code, so we catch more
   cases where the reference has been optimized out.  */
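/* Very roughly, a front end is expected to drive the interface above as
   follows.  This is only an illustrative sketch, not part of the interface
   itself; the real call sites and their exact order live in the individual
   front ends, and the varpool finalization entry point referred to above
   as varpool_finalize_variable is varpool_finalize_decl in the sources:

	for each function body parsed:
	  cgraph_finalize_function (fndecl, false);
	for each file-scope variable finalized:
	  varpool_finalize_decl (vardecl);
	cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit analyzes the unit and finally calls
   cgraph_optimize, which runs the IPA passes and expands the surviving
   functions to assembly.  */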

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"
#include "plugin.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
static void cgraph_analyze_function (struct cgraph_node *);

FILE *cgraph_dump_file;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or something magic in the system
   configury.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see PR24561),
     but don't do so for always_inline functions, functions declared inline
     and nested functions.  These were optimized out in the original
     implementation and it is unclear whether we want to change the
     behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize
            && !node->local.disregard_inline_limits
            && !DECL_DECLARED_INLINE_P (decl)
            && !(DECL_CONTEXT (decl)
                 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto
       && !flag_whopr)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}

/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into the callgraph in a way so that they look like ordinary
   reachable functions inserted into the callgraph already at construction
   time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize the function and
             move it into the reachable functions list.  */

          node->next_needed = NULL;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have been already performed on the whole
             cgraph but not on this function.  */

          gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          compute_inline_parameters (node);
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
               && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.pass.sub);
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->process = 0;
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
      cgraph_call_function_insertion_hooks (node);
      varpool_analyze_pending_decls ();
    }
  return output;
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ are not well defined.
   We replace the old body with the new body so that in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */
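
/* A concrete illustration of the situation described above (a sketch only),
   using the GNU "extern inline" extension:

	extern inline int f (void) { return 1; }
	int f (void) { return 2; }

   When the second definition is finalized, cgraph_reset_node throws away
   everything derived from the first body (the local/global/rtl information
   and the outgoing call edges) and the function is analyzed again from
   scratch.  */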

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should never
     present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already processed it and ignored it as not needed or got
     a re-declaration in IMA mode.  */
  if (node->reachable)
    {
      struct cgraph_node *n;

      for (n = cgraph_nodes_queue; n; n = n->next_needed)
        if (n == node)
          break;
      if (!n)
        node->reachable = 0;
    }
}

static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;

  if (node->nested)
    lower_nested_functions (node->decl);
  gcc_assert (!node->nested);

  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by the vtable from
         another compilation unit.  Still we want to devirtualize calls
         to those, so we need to analyze them.
         FIXME: We should introduce may edges for this purpose and update
         their handling in unreachable function removal and the inliner
         too.  */
      || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}

/* The C99 extern inline keywords allow changing of the declaration after
   the function has been finalized.  We need to re-decide whether we want
   to mark the function as needed then.  */

void
cgraph_mark_if_needed (tree decl)
{
  struct cgraph_node *node = cgraph_node (decl);
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
}

#ifdef ENABLE_CHECKING
/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  while (node != node2 && node2)
    node2 = node2->clone_of;
  return node2 != NULL;
}
#endif

/* Verify edge E count and frequency.  */

static bool
verify_edge_count_and_frequency (struct cgraph_edge *e)
{
  bool error_found = false;
  if (e->count < 0)
    {
      error ("caller edge count is negative");
      error_found = true;
    }
  if (e->frequency < 0)
    {
      error ("caller edge frequency is negative");
      error_found = true;
    }
  if (e->frequency > CGRAPH_FREQ_MAX)
    {
      error ("caller edge frequency is too large");
      error_found = true;
    }
  if (gimple_has_body_p (e->caller->decl)
      && !e->caller->global.inlined_to
      && (e->frequency
          != compute_call_stmt_bb_frequency (e->caller->decl,
                                             gimple_bb (e->call_stmt))))
    {
      error ("caller edge frequency %i does not match BB frequency %i",
             e->frequency,
             compute_call_stmt_bb_frequency (e->caller->decl,
                                             gimple_bb (e->call_stmt)));
      error_found = true;
    }
  return error_found;
}

/* Verify consistency of cgraph node NODE.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs the correct cfun.  */
  set_cfun (this_cfun);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               identifier_to_locale (cgraph_node_name (e->caller)),
               identifier_to_locale (cgraph_node_name (e->callee)));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("Externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("Inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("Inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
        {
          error ("aux field set for indirect edge from %s",
                 identifier_to_locale (cgraph_node_name (e->caller)));
          error_found = true;
        }
      if (!e->indirect_unknown_callee
          || !e->indirect_info)
        {
          error ("An indirect edge from %s is not marked as indirect or has "
                 "no associated indirect_info, the corresponding statement is: ",
                 identifier_to_locale (cgraph_node_name (e->caller)));
          debug_gimple_stmt (e->call_stmt);
          error_found = true;
        }
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
        error_found = true;
      if (!e->inline_failed)
        {
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
        if (n == node)
          break;
      if (!n)
        {
          error ("node has wrong clone_of");
          error_found = true;
        }
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
        if (n->clone_of != node)
          break;
      if (n)
        {
          error ("node has wrong clone list");
          error_found = true;
        }
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
        {
          error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
          error_found = true;
        }
      if (n == node)
        {
          error ("node is alone in a comdat group");
          error_found = true;
        }
      do
        {
          if (!n->same_comdat_group)
            {
              error ("same_comdat_group is not a circular list");
              error_found = true;
              break;
            }
          n = n->same_comdat_group;
        }
      while (n != node);
    }

  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
              {
                gimple stmt = gsi_stmt (gsi);
                if (is_gimple_call (stmt))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    tree decl = gimple_call_fndecl (stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            debug_gimple_stmt (stmt);
                            error_found = true;
                          }
                        if (!e->indirect_unknown_callee)
                          {
                            if (e->callee->same_body_alias)
                              {
                                error ("edge points to same body alias:");
                                debug_tree (e->callee->decl);
                                error_found = true;
                              }
#ifdef ENABLE_CHECKING
                            else if (!e->callee->global.inlined_to
                                     && decl
                                     && cgraph_get_node (decl)
                                     && (e->callee->former_clone_of
                                         != cgraph_get_node (decl)->decl)
                                     && !clone_of_p (cgraph_node (decl),
                                                     e->callee))
                              {
                                error ("edge points to wrong declaration:");
                                debug_tree (e->callee->decl);
                                fprintf (stderr, " Instead of:");
                                debug_tree (decl);
                                error_found = true;
                              }
#endif
                          }
                        else if (decl)
                          {
                            error ("an indirect edge with unknown callee "
                                   "corresponding to a call_stmt with "
                                   "a known declaration:");
                            error_found = true;
                            debug_gimple_stmt (e->call_stmt);
                          }
                        e->aux = (void *)1;
                      }
                    else if (decl)
                      {
                        error ("missing callgraph edge for call stmt:");
                        debug_gimple_stmt (stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)),
                     identifier_to_locale (cgraph_node_name (e->callee)));
              debug_gimple_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
      for (e = node->indirect_calls; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("an indirect edge from %s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)));
              debug_gimple_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify whole cgraph structure.  */
DEBUG_FUNCTION void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (seen_error ())
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (seen_error ())
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

     void f();
     void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

     void f() {}
     void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
        cgraph_mark_needed_node (node);
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (node->decl))
            warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute has effect only on public objects");
          else if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (DECL_PRESERVE_P (decl))
        {
          vnode->force_output = true;
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute has effect only on public objects");
          else if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
    }
}

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Just the new part of the queue is
   re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points on the way (by template instantiation
     and virtual method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later
         use the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
        {
          cgraph_reset_node (node);
          continue;
        }

      if (!node->analyzed)
        cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);

      if (node->same_comdat_group)
        {
          for (next = node->same_comdat_group;
               next != node;
               next = next->same_comdat_group)
            cgraph_mark_reachable_node (next);
        }

      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body.  The DECL_INITIAL() of
         that abstract function declaration will be later needed to output
         debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
        {
          struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
          origin_node->abstract_and_needed = true;
        }

      /* We finalize local static variables while constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
        cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}


/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors; we would
     miss diagnostics for the following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}


/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
        continue;

      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (node->analyzed
          && !node->global.inlined_to
          && (!cgraph_only_called_directly_p (node)
              || (e && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        {
          node->process = 1;
          if (node->same_comdat_group)
            {
              struct cgraph_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                next->process = 1;
            }
        }
      else if (node->same_comdat_group)
        {
#ifdef ENABLE_CHECKING
          check_same_comdat_groups = true;
#endif
        }
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit, when the offline copy is outside
                 the partition but inline copies are inside it, we can end
                 up not removing the body since we no longer have an
                 analyzed node pointing to it.  */
              && !node->in_other_partition
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to
                      || !gimple_has_body_p (decl)
                      || node->in_other_partition
                      || DECL_EXTERNAL (decl));

        }

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
        {
          tree decl = node->decl;
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit, when the offline copy is outside
                 the partition but inline copies are inside it, we can end
                 up not removing the body since we no longer have an
                 analyzed node pointing to it.  */
              && !node->in_other_partition
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
        }
#endif
}

/* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a
   function in lowered GIMPLE form.

   Set current_function_decl and cfun to the newly constructed empty
   function body.  Return the basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create a BB for the body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}

/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable offset
   indicated by VIRTUAL_OFFSET, if that is non-null.  THIS_ADJUSTING is
   nonzero for a this adjusting thunk and zero for a result adjusting
   thunk.  */
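
/* Schematically (a sketch of the adjustment the code below emits, not
   additional semantics):

	this-adjusting:    ptr += FIXED_OFFSET;
			   if (VIRTUAL_OFFSET)
			     ptr += *(*(ptr) + VIRTUAL_OFFSET);
	result-adjusting:  if (VIRTUAL_OFFSET)
			     ptr += *(*(ptr) + VIRTUAL_OFFSET);
			   ptr += FIXED_OFFSET;

   i.e. the vcall offset is loaded from the vtable, whose address is read
   at offset zero from the (possibly already fixed-up) pointer.  */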

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
              tree ptr, bool this_adjusting,
              HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
                                  fold_build2_loc (input_location,
                                                   POINTER_PLUS_EXPR,
                                                   TREE_TYPE (ptr), ptr,
                                                   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      if (!vtable_entry_type)
        {
          tree vfunc_type = make_node (FUNCTION_TYPE);
          TREE_TYPE (vfunc_type) = integer_type_node;
          TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
          layout_type (vfunc_type);

          vtable_entry_type = build_pointer_type (vfunc_type);
        }

      vtabletmp =
        create_tmp_var (build_pointer_type
                        (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
                                  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
                                          ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
                                   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
                                  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
                                  fold_build2_loc (input_location,
                                                   POINTER_PLUS_EXPR,
                                                   TREE_TYPE (vtabletmp2),
                                                   vtabletmp2,
                                                   fold_convert (sizetype,
                                                                 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
                                   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
                                  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
                             POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                             offsettmp);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
          gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
          mark_symbols_for_renaming (stmt);
          find_referenced_vars_in (stmt);
        }
      ptr = fold_build2_loc (input_location,
                             POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
                             size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}

/* Produce assembler for thunk NODE.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
                                              virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
        = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
                      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
         create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
                                       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
        {
          resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
          DECL_ARTIFICIAL (resdecl) = 1;
          DECL_IGNORED_P (resdecl) = 1;
          DECL_RESULT (thunk_fndecl) = resdecl;
        }
      else
        resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build the call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
        {
          if (!is_gimple_reg_type (restype))
            {
              restmp = resdecl;
              add_local_decl (cfun, restmp);
              BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
            }
          else
            restmp = create_tmp_var_raw (restype, "retval");
        }

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
                        thunk_adjust (&bsi,
                                      a, 1, fixed_offset,
                                      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
          tree true_label = NULL_TREE;

          if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
            {
              gimple stmt;
              /* If the return type is a pointer, we need to
                 protect against NULL.  We know there will be an
                 adjustment, because that's why we're emitting a
                 thunk.  */
              then_bb = create_basic_block (NULL, (void *) 0, bb);
              return_bb = create_basic_block (NULL, (void *) 0, then_bb);
              else_bb = create_basic_block (NULL, (void *) 0, else_bb);
              remove_edge (single_succ_edge (bb));
              true_label = gimple_block_label (then_bb);
              stmt = gimple_build_cond (NE_EXPR, restmp,
                                        fold_convert (TREE_TYPE (restmp),
                                                      integer_zero_node),
                                        NULL_TREE, NULL_TREE);
              gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
              make_edge (bb, then_bb, EDGE_TRUE_VALUE);
              make_edge (bb, else_bb, EDGE_FALSE_VALUE);
              make_edge (return_bb, EXIT_BLOCK_PTR, 0);
              make_edge (then_bb, return_bb, EDGE_FALLTHRU);
              make_edge (else_bb, return_bb, EDGE_FALLTHRU);
              bsi = gsi_last_bb (then_bb);
            }

          restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
                                 fixed_offset, virtual_offset);
          if (true_label)
            {
              gimple stmt;
              bsi = gsi_last_bb (else_bb);
              stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
                                                                integer_zero_node));
              gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
              bsi = gsi_last_bb (return_bb);
            }
        }
      else
        gimple_call_set_tail (call, true);

      /* Build the return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
         referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}

/* Expand the function specified by NODE.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that the back end didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      for (alias = node->same_body;
           alias && alias->next; alias = alias->next)
        ;
      /* Walk aliases in the order they were created; it is possible that
         thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
        {
          next = alias->previous;
          if (!alias->thunk.thunk_p)
            assemble_alias (alias->decl,
                            DECL_ASSEMBLER_NAME (alias->thunk.alias));
          else
            assemble_thunk (alias);
        }
      node->alias = saved_alias;
    }
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}

/* Return true when the call edge E may be inlined (that is, it has not been
   marked as failing to inline); the current inline-failed reason is stored
   into *REASON.  */

bool
cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
{
  *reason = e->inline_failed;
  return !e->inline_failed;
}


/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so a function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* The garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->process)
        {
          gcc_assert (node->reachable);
          node->process = 0;
          cgraph_expand_function (node);
        }
    }
  cgraph_process_new_functions ();

  free (order);

}

/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,
  ORDER_FUNCTION,
  ORDER_VAR,
  ORDER_ASM
};

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};

/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */
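
/* For instance (illustrative only): if a translation unit contains, in this
   order, a toplevel asm statement, a variable and a function

	asm ("...");
	int counter;
	void foo (void) { ... }

   then in this mode they are emitted in exactly that order, using the order
   numbers recorded when each entity was registered, instead of the usual
   layout that groups functions and variables separately.  */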
1591
1592static void
1593cgraph_output_in_order (void)
1594{
1595 int max;
474eccc6
ILT
1596 struct cgraph_order_sort *nodes;
1597 int i;
1598 struct cgraph_node *pf;
8a4a83ed 1599 struct varpool_node *pv;
474eccc6
ILT
1600 struct cgraph_asm_node *pa;
1601
1602 max = cgraph_order;
33283dad 1603 nodes = XCNEWVEC (struct cgraph_order_sort, max);
474eccc6 1604
8a4a83ed 1605 varpool_analyze_pending_decls ();
474eccc6
ILT
1606
1607 for (pf = cgraph_nodes; pf; pf = pf->next)
1608 {
257eb6e3 1609 if (pf->process)
474eccc6
ILT
1610 {
1611 i = pf->order;
1612 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1613 nodes[i].kind = ORDER_FUNCTION;
1614 nodes[i].u.f = pf;
1615 }
1616 }
1617
8a4a83ed 1618 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
474eccc6
ILT
1619 {
1620 i = pv->order;
1621 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1622 nodes[i].kind = ORDER_VAR;
1623 nodes[i].u.v = pv;
1624 }
1625
1626 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1627 {
1628 i = pa->order;
1629 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1630 nodes[i].kind = ORDER_ASM;
1631 nodes[i].u.a = pa;
1632 }
474eccc6 1633
7386e3ee
JH
1634 /* In toplevel reorder mode we output all statics; mark them as needed. */
1635 for (i = 0; i < max; ++i)
1636 {
1637 if (nodes[i].kind == ORDER_VAR)
1638 {
1639 varpool_mark_needed_node (nodes[i].u.v);
1640 }
1641 }
1642 varpool_empty_needed_queue ();
1643
474eccc6
ILT
1644 for (i = 0; i < max; ++i)
1645 {
1646 switch (nodes[i].kind)
1647 {
1648 case ORDER_FUNCTION:
257eb6e3 1649 nodes[i].u.f->process = 0;
474eccc6
ILT
1650 cgraph_expand_function (nodes[i].u.f);
1651 break;
1652
1653 case ORDER_VAR:
8a4a83ed 1654 varpool_assemble_decl (nodes[i].u.v);
474eccc6
ILT
1655 break;
1656
1657 case ORDER_ASM:
1658 assemble_asm (nodes[i].u.a->asm_str);
1659 break;
1660
1661 case ORDER_UNDEFINED:
1662 break;
1663
1664 default:
1665 gcc_unreachable ();
1666 }
1667 }
e7b9eb2c
ILT
1668
1669 cgraph_asm_nodes = NULL;
33283dad 1670 free (nodes);
474eccc6
ILT
1671}
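
/* For illustration only: with -fno-toplevel-reorder and a unit such as

     int a;
     asm ("# prologue");
     static void f (void) {}
     int b;

   the order fields assigned at finalization time index NODES[] so that the
   switch above emits the variable a, the toplevel asm, the function f and
   the variable b in exactly that source order.  Slots never claimed by any
   node stay ORDER_UNDEFINED (the array is zero-allocated) and are simply
   skipped.  */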

/* Return true when the function body of DECL still needs to be kept around
   for later re-use.  */
bool
cgraph_preserve_function_body_p (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (cgraph_global_info_ready);
  /* Check if there is any clone around.  */
  node = cgraph_node (decl);
  if (node->clones)
    return true;
  return false;
}
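
/* The body is kept only while clones exist: materializing a clone later
   (see cgraph_materialize_clone) copies the GIMPLE body from the clone's
   origin, so releasing the origin's body early would leave nothing to
   copy from.  */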

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
        ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
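
/* Roughly, the sequence above is: run the small IPA passes and collect
   summaries for the regular IPA passes when compiling from source
   (!in_lto_p), run the LTO streaming passes (bracketed by the target's
   lto_start/lto_end hooks when LTO output is requested), and finally run
   the regular IPA passes themselves unless this is an -fltrans stage,
   where those decisions were already made earlier in the WHOPR
   pipeline.  */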


/* Perform simple optimizations based on callgraph.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* The frontend may output common variables after the unit has been
     finalized.  It is safe to deal with them here as they are always
     zero-initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there were any errors or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
        if (node->analyzed
            && (node->global.inlined_to
                || gimple_has_body_p (node->decl)))
          {
            error_found = true;
            dump_cgraph_node (stderr, node);
          }
      if (error_found)
        internal_error ("nodes with unreleased memory found");
    }
#endif
}
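
/* In short, cgraph_optimize drives: analysis of pending variables, the IPA
   passes, removal of now-unreachable nodes, materialization of virtual
   clones, selection of the functions to output, and then assembly either
   in strict source order (-fno-toplevel-reorder) or via the reordering
   path that also prunes unreferenced statics.  */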

void
init_cgraph (void)
{
  if (!cgraph_dump_file)
    cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}


/* Create a new cgraph node which is the new version of
   the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   are copied, to prevent duplication of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers,
                                 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->local.externally_visible = false;
  new_version->local.local = true;
  new_version->local.vtable_method = false;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  for (e = old_version->callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         e->loop_nest, true);
  for (e = old_version->indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
                         e->lto_stmt_uid, REG_BR_PROB_BASE,
                         CGRAPH_FREQ_BASE,
                         e->loop_nest, true);
  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}
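
/* Only edges whose call statement lies in a copied basic block are cloned
   above; calls that would be dead in a restricted clone must not turn into
   bogus callgraph edges.  The caller edges listed in REDIRECT_CALLERS are
   rewired immediately; every other caller keeps calling the old
   version.  */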

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   The REDIRECT_CALLERS vector includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip,
                            bitmap bbs_to_copy,
                            basic_block new_entry_block,
                            const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
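
/* A minimal usage sketch, for illustration only (the caller and helper
   names here are hypothetical): an IPA pass that wants a specialized copy
   of NODE with its second parameter dropped could do roughly

     bitmap skip = BITMAP_ALLOC (NULL);
     VEC (cgraph_edge_p, heap) *callers = collect_callers_somehow (node);

     bitmap_set_bit (skip, 1);
     new_node = cgraph_function_versioning (node, callers, NULL, skip,
                                            NULL, NULL, "myclone");

   TREE_MAP is left NULL because no tree replacements are requested, and
   passing NULL for BBS_TO_COPY and NEW_ENTRY_BLOCK copies the whole
   body.  */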

/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
                            NULL, NULL);

  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
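
/* After this runs, FIRST_CLONE owns a fresh DECL with its own copy of the
   body and has been re-parented as the root of the remaining inline
   clones, so later passes may rewrite the offline copy held by NODE
   without disturbing the inlined instances.  */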

/* Given a virtual clone, turn it into an actual clone.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
#ifdef ENABLE_CHECKING
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
#endif
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
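
/* Once the body has been copied in, the node is unlinked from its origin's
   clone list; if the origin itself is no longer needed (not analyzed and
   with no remaining clones), its body and outgoing edges are released
   right away to keep memory usage down.  */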

/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
               cgraph_node_name (e->caller), e->caller->uid,
               cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
        {
          fprintf (cgraph_dump_file, " combined args to skip: ");
          dump_bitmap (cgraph_dump_file,
                       e->callee->clone.combined_args_to_skip);
        }
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      gimple_stmt_iterator gsi;

      new_stmt
        = gimple_call_copy_skip_args (e->call_stmt,
                                      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, true);
    }
  else
    {
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, " updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
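
/* Two cases are handled above: when the callee is a clone that drops some
   parameters, the original call statement is rebuilt without the skipped
   arguments (and the virtual-operand definition is moved to the new
   statement); otherwise only the called declaration is swapped and the
   statement updated in place.  */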

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the current
   WHOPR implementation does that and it is a bit easier to keep everything
   right in this order.  */
void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We could also use topological order, but the number of iterations should
     be bounded by the number of IPA passes since a single IPA pass is
     probably not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      for (node = cgraph_nodes; node; node = node->next)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               cgraph_node_name (node->clone_of),
                               cgraph_node_name (node));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, " replace map: ");
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
                                                      node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = VEC_index (ipa_replace_map_p,
                                                        node->clone.tree_map,
                                                        i);
                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " args_to_skip: ");
                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                  stabilized = false;
                }
            }
        }
    }
  for (node = cgraph_nodes; node; node = node->next)
    if (!node->analyzed && node->callees)
      cgraph_node_remove_callees (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}
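
/* The outer loop iterates to a fixed point because materializing one clone
   can make the body of that clone's own clones' origin available; as noted
   above, the number of iterations is in practice bounded by the number of
   IPA passes that created clones of clones.  */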

#include "gt-cgraphunit.h"