]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
baseline_symbols.txt: Regenerated.
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
a418679d 1/* Callgraph based interprocedural optimizations.
d06865bf 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
0850f694 3 2011, 2012 Free Software Foundation, Inc.
1c4a429a
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1c4a429a
JH
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
1c4a429a 21
18c6ada9 22/* This module implements main driver of compilation process as well as
a418679d 23 few basic interprocedural optimizers.
18c6ada9
JH
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
efe75b6f
JH
35 (There is one exception needed for implementing GCC extern inline
36 function.)
18c6ada9 37
8a4a83ed 38 - varpool_finalize_variable
18c6ada9 39
1ae58c30 40 This function has same behavior as the above but is used for static
18c6ada9
JH
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
efe75b6f
JH
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
18c6ada9 47
dd5a833e
MS
48 In the call-graph construction and local function analysis takes
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
18c6ada9 51
efe75b6f
JH
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
18c6ada9
JH
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
18c6ada9 62 - cgraph_mark_needed_node
8a4a83ed 63 - varpool_mark_needed_node
18c6ada9 64
efe75b6f
JH
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
dbb23ff7 69 used by C++ frontend to explicitly mark the keyed methods.
18c6ada9
JH
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
7e8b322a 80 Analyzing of all functions is deferred
18c6ada9
JH
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
e1990f69 91 The intra-procedural information is produced and its existence
18c6ada9
JH
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that has not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
7e8b322a 106 where reference has been optimized out. */
9b3e897d 107
6674a6ce 108
1c4a429a
JH
109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
c9b9aa64 114#include "rtl.h"
6674a6ce 115#include "tree-flow.h"
1c4a429a
JH
116#include "tree-inline.h"
117#include "langhooks.h"
0c58f841 118#include "pointer-set.h"
1c4a429a
JH
119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
dafc5b82 125#include "diagnostic.h"
cf835838
JM
126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
a194aa56 128#include "timevar.h"
b58b1157
JH
129#include "params.h"
130#include "fibheap.h"
dc0bfe6a 131#include "intl.h"
902edd36 132#include "function.h"
57fb5341 133#include "ipa-prop.h"
726a989a
RB
134#include "gimple.h"
135#include "tree-iterator.h"
b4861090 136#include "tree-pass.h"
a406865a 137#include "tree-dump.h"
cd9c7bd2 138#include "output.h"
3baf459d 139#include "coverage.h"
090fa0ab 140#include "plugin.h"
632b4f8e 141#include "ipa-inline.h"
af8bca3c 142#include "ipa-utils.h"
47c79d56 143#include "lto-streamer.h"
b58b1157 144
a20af5b8 145static void cgraph_expand_all_functions (void);
db0e878d
AJ
146static void cgraph_mark_functions_to_output (void);
147static void cgraph_expand_function (struct cgraph_node *);
21c4a6a7 148static void cgraph_output_pending_asms (void);
7dff32e6 149
0a5fa5a1 150FILE *cgraph_dump_file;
9b3e897d 151
6744a6ab
JH
152/* Used for vtable lookup in thunk adjusting. */
153static GTY (()) tree vtable_entry_type;
154
8dafba3c
RH
155/* Determine if function DECL is needed. That is, visible to something
156 either outside this translation unit, something magic in the system
7e8b322a 157 configury. */
8dafba3c 158
d7f09764
DN
159bool
160cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
8dafba3c 161{
e7d6beb0 162 /* If the user told us it is used, then it must be so. */
386b46cf
JH
163 if (node->local.externally_visible)
164 return true;
165
e7d6beb0
JH
166 /* ??? If the assembler name is set by hand, it is possible to assemble
167 the name later after finalizing the function and the fact is noticed
168 in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
39e2db00 170 && (!node->thunk.thunk_p && !node->same_body_alias)
e7d6beb0
JH
171 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
172 return true;
173
a1d31187
JH
174 /* With -fkeep-inline-functions we are keeping all inline functions except
175 for extern inline ones. */
176 if (flag_keep_inline_functions
177 && DECL_DECLARED_INLINE_P (decl)
b521dcbe 178 && !DECL_EXTERNAL (decl)
e7f23018 179 && !DECL_DISREGARD_INLINE_LIMITS (decl))
a1d31187
JH
180 return true;
181
8dafba3c
RH
182 /* If we decided it was needed before, but at the time we didn't have
183 the body of the function available, then it's still needed. We have
184 to go back and re-check its dependencies now. */
185 if (node->needed)
186 return true;
187
188 /* Externally visible functions must be output. The exception is
c22cacf3 189 COMDAT functions that must be output only when they are needed.
04f77d0f
JH
190
191 When not optimizing, also output the static functions. (see
46f5f7f2 192 PR24561), but don't do so for always_inline functions, functions
c5d01958 193 declared inline and nested functions. These were optimized out
b633db7b 194 in the original implementation and it is unclear whether we want
6fc0bb99 195 to change the behavior here. */
5d342ef9 196 if (((TREE_PUBLIC (decl)
c5d01958 197 || (!optimize
c5c90089 198 && !node->same_body_alias
e7f23018 199 && !DECL_DISREGARD_INLINE_LIMITS (decl)
b633db7b 200 && !DECL_DECLARED_INLINE_P (decl)
c5d01958
EB
201 && !(DECL_CONTEXT (decl)
202 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
b20996ff 203 && !flag_whole_program
014d92e1 204 && !flag_lto)
ce91e74c 205 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
8dafba3c
RH
206 return true;
207
8dafba3c
RH
208 return false;
209}
210
d60ab196 211/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
f45e0ad1
JH
212 functions into callgraph in a way so they look like ordinary reachable
213 functions inserted into callgraph already at construction time. */
214
215bool
216cgraph_process_new_functions (void)
217{
218 bool output = false;
219 tree fndecl;
220 struct cgraph_node *node;
221
2942c502 222 varpool_analyze_pending_decls ();
f45e0ad1
JH
223 /* Note that this queue may grow as its being processed, as the new
224 functions may generate new ones. */
225 while (cgraph_new_nodes)
226 {
227 node = cgraph_new_nodes;
228 fndecl = node->decl;
229 cgraph_new_nodes = cgraph_new_nodes->next_needed;
230 switch (cgraph_state)
231 {
232 case CGRAPH_STATE_CONSTRUCTION:
233 /* At construction time we just need to finalize function and move
234 it into reachable functions list. */
235
236 node->next_needed = NULL;
237 cgraph_finalize_function (fndecl, false);
238 cgraph_mark_reachable_node (node);
239 output = true;
4d5dcfb2 240 cgraph_call_function_insertion_hooks (node);
f45e0ad1
JH
241 break;
242
243 case CGRAPH_STATE_IPA:
7a388ee4 244 case CGRAPH_STATE_IPA_SSA:
f45e0ad1
JH
245 /* When IPA optimization already started, do all essential
246 transformations that has been already performed on the whole
247 cgraph but not on this function. */
248
726a989a 249 gimple_register_cfg_hooks ();
f45e0ad1
JH
250 if (!node->analyzed)
251 cgraph_analyze_function (node);
252 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
253 current_function_decl = fndecl;
7a388ee4
JH
254 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
255 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
256 /* When not optimizing, be sure we run early local passes anyway
257 to expand OMP. */
258 || !optimize)
8ddbbcae 259 execute_pass_list (pass_early_local_passes.pass.sub);
bb7e6d55 260 else
632b4f8e 261 compute_inline_parameters (node, true);
f45e0ad1
JH
262 free_dominance_info (CDI_POST_DOMINATORS);
263 free_dominance_info (CDI_DOMINATORS);
264 pop_cfun ();
265 current_function_decl = NULL;
4d5dcfb2 266 cgraph_call_function_insertion_hooks (node);
f45e0ad1
JH
267 break;
268
269 case CGRAPH_STATE_EXPANSION:
270 /* Functions created during expansion shall be compiled
271 directly. */
257eb6e3 272 node->process = 0;
4d5dcfb2 273 cgraph_call_function_insertion_hooks (node);
f45e0ad1
JH
274 cgraph_expand_function (node);
275 break;
276
277 default:
278 gcc_unreachable ();
279 break;
280 }
2942c502 281 varpool_analyze_pending_decls ();
f45e0ad1
JH
282 }
283 return output;
284}
285
d71cc23f
JH
286/* As an GCC extension we allow redefinition of the function. The
287 semantics when both copies of bodies differ is not well defined.
288 We replace the old body with new body so in unit at a time mode
289 we always use new body, while in normal mode we may end up with
290 old body inlined into some functions and new body expanded and
291 inlined in others.
292
293 ??? It may make more sense to use one body for inlining and other
294 body for expanding the function but this is difficult to do. */
295
296static void
297cgraph_reset_node (struct cgraph_node *node)
298{
257eb6e3 299 /* If node->process is set, then we have already begun whole-unit analysis.
7e8b322a
JH
300 This is *not* testing for whether we've already emitted the function.
301 That case can be sort-of legitimately seen with real function redefinition
302 errors. I would argue that the front end should never present us with
303 such a case, but don't enforce that for now. */
257eb6e3 304 gcc_assert (!node->process);
d71cc23f
JH
305
306 /* Reset our data structures so we can analyze the function again. */
307 memset (&node->local, 0, sizeof (node->local));
308 memset (&node->global, 0, sizeof (node->global));
309 memset (&node->rtl, 0, sizeof (node->rtl));
310 node->analyzed = false;
d71cc23f
JH
311 node->local.finalized = false;
312
d71cc23f 313 cgraph_node_remove_callees (node);
d71cc23f 314}
d853a20e 315
953ff289
DN
316static void
317cgraph_lower_function (struct cgraph_node *node)
318{
319 if (node->lowered)
320 return;
a406865a
RG
321
322 if (node->nested)
323 lower_nested_functions (node->decl);
324 gcc_assert (!node->nested);
325
953ff289
DN
326 tree_lowering_passes (node->decl);
327 node->lowered = true;
328}
329
6b00c969
RH
330/* DECL has been parsed. Take it, queue it, compile it at the whim of the
331 logic in effect. If NESTED is true, then our caller cannot stand to have
332 the garbage collector run at the moment. We would need to either create
333 a new GC context, or just not compile right now. */
1c4a429a
JH
334
335void
6b00c969 336cgraph_finalize_function (tree decl, bool nested)
1c4a429a 337{
a358e188 338 struct cgraph_node *node = cgraph_get_create_node (decl);
1c4a429a 339
d853a20e 340 if (node->local.finalized)
b125ad45
JH
341 {
342 cgraph_reset_node (node);
343 node->local.redefined_extern_inline = true;
344 }
6b00c969 345
d853a20e 346 notice_global_symbol (decl);
f6981e16 347 node->local.finalized = true;
e21aff8a 348 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
1c4a429a 349
d7f09764 350 if (cgraph_decide_is_function_needed (node, decl))
8dafba3c
RH
351 cgraph_mark_needed_node (node);
352
ff5c4582 353 /* Since we reclaim unreachable nodes at the end of every language
e7d6beb0
JH
354 level unit, we need to be conservative about possible entry points
355 there. */
508e4757
JH
356 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
357 || DECL_STATIC_CONSTRUCTOR (decl)
9b389a5e
JH
358 || DECL_STATIC_DESTRUCTOR (decl)
359 /* COMDAT virtual functions may be referenced by vtable from
61502ca8 360 other compilation unit. Still we want to devirtualize calls
9b389a5e
JH
361 to those so we need to analyze them.
362 FIXME: We should introduce may edges for this purpose and update
363 their handling in unreachable function removal and inliner too. */
c47d0034
JH
364 || (DECL_VIRTUAL_P (decl)
365 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
e7d6beb0
JH
366 cgraph_mark_reachable_node (node);
367
8dafba3c 368 /* If we've not yet emitted decl, tell the debug info about it. */
6b00c969 369 if (!TREE_ASM_WRITTEN (decl))
8dafba3c 370 (*debug_hooks->deferred_inline_function) (decl);
d173e685 371
902edd36
JH
372 /* Possibly warn about unused parameters. */
373 if (warn_unused_parameter)
374 do_warn_unused_parameter (decl);
7e8b322a
JH
375
376 if (!nested)
377 ggc_collect ();
1c4a429a
JH
378}
379
f0c882ab
JH
380/* C99 extern inline keywords allow changing of declaration after function
381 has been finalized. We need to re-decide if we want to mark the function as
382 needed then. */
383
384void
385cgraph_mark_if_needed (tree decl)
386{
581985d7 387 struct cgraph_node *node = cgraph_get_node (decl);
d7f09764 388 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
f0c882ab
JH
389 cgraph_mark_needed_node (node);
390}
391
9187e02d
JH
392/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
393static bool
394clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
395{
39e2db00
JH
396 node = cgraph_function_or_thunk_node (node, NULL);
397 node2 = cgraph_function_or_thunk_node (node2, NULL);
9187e02d
JH
398 while (node != node2 && node2)
399 node2 = node2->clone_of;
400 return node2 != NULL;
401}
402
02ec6988
MJ
403/* Verify edge E count and frequency. */
404
405static bool
406verify_edge_count_and_frequency (struct cgraph_edge *e)
407{
408 bool error_found = false;
409 if (e->count < 0)
410 {
411 error ("caller edge count is negative");
412 error_found = true;
413 }
414 if (e->frequency < 0)
415 {
416 error ("caller edge frequency is negative");
417 error_found = true;
418 }
419 if (e->frequency > CGRAPH_FREQ_MAX)
420 {
421 error ("caller edge frequency is too large");
422 error_found = true;
423 }
424 if (gimple_has_body_p (e->caller->decl)
425 && !e->caller->global.inlined_to
74605a11
JH
426 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
427 Remove this once edges are actualy removed from the function at that time. */
428 && (e->frequency
429 || (inline_edge_summary_vec
4762f561
JH
430 && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
431 <= (unsigned) e->uid)
432 || !inline_edge_summary (e)->predicate)))
02ec6988
MJ
433 && (e->frequency
434 != compute_call_stmt_bb_frequency (e->caller->decl,
435 gimple_bb (e->call_stmt))))
436 {
61502ca8 437 error ("caller edge frequency %i does not match BB frequency %i",
02ec6988
MJ
438 e->frequency,
439 compute_call_stmt_bb_frequency (e->caller->decl,
440 gimple_bb (e->call_stmt)));
441 error_found = true;
442 }
443 return error_found;
444}
445
89843f5d
JJ
446/* Switch to THIS_CFUN if needed and print STMT to stderr. */
447static void
448cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
449{
450 /* debug_gimple_stmt needs correct cfun */
451 if (cfun != this_cfun)
452 set_cfun (this_cfun);
453 debug_gimple_stmt (stmt);
454}
455
81361831
MJ
456/* Verify that call graph edge E corresponds to DECL from the associated
457 statement. Return true if the verification should fail. */
458
459static bool
460verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
461{
b75d1e21
MJ
462 struct cgraph_node *node;
463
464 if (!decl || e->callee->global.inlined_to)
465 return false;
466 node = cgraph_get_node (decl);
467
468 /* We do not know if a node from a different partition is an alias or what it
469 aliases and therefore cannot do the former_clone_of check reliably. */
470 if (!node || node->in_other_partition)
471 return false;
472 node = cgraph_function_or_thunk_node (node, NULL);
473
474 if ((e->callee->former_clone_of != node->decl)
81361831
MJ
475 /* IPA-CP sometimes redirect edge to clone and then back to the former
476 function. This ping-pong has to go, eventaully. */
b75d1e21
MJ
477 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
478 && !clone_of_p (node, e->callee))
81361831
MJ
479 return true;
480 else
481 return false;
482}
483
18c6ada9 484/* Verify cgraph nodes of given cgraph node. */
24e47c76 485DEBUG_FUNCTION void
18c6ada9
JH
486verify_cgraph_node (struct cgraph_node *node)
487{
488 struct cgraph_edge *e;
e21aff8a
SB
489 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
490 basic_block this_block;
726a989a 491 gimple_stmt_iterator gsi;
e0704a46 492 bool error_found = false;
18c6ada9 493
1da2ed5f 494 if (seen_error ())
5771bd91
RG
495 return;
496
18c6ada9 497 timevar_push (TV_CGRAPH_VERIFY);
18c6ada9
JH
498 for (e = node->callees; e; e = e->next_callee)
499 if (e->aux)
500 {
ab532386 501 error ("aux field set for edge %s->%s",
4f1e4960
JM
502 identifier_to_locale (cgraph_node_name (e->caller)),
503 identifier_to_locale (cgraph_node_name (e->callee)));
18c6ada9
JH
504 error_found = true;
505 }
06191a23
JH
506 if (node->count < 0)
507 {
d8a07487 508 error ("execution count is negative");
06191a23
JH
509 error_found = true;
510 }
b20996ff
JH
511 if (node->global.inlined_to && node->local.externally_visible)
512 {
d8a07487 513 error ("externally visible inline clone");
b20996ff
JH
514 error_found = true;
515 }
516 if (node->global.inlined_to && node->address_taken)
517 {
d8a07487 518 error ("inline clone with address taken");
b20996ff
JH
519 error_found = true;
520 }
521 if (node->global.inlined_to && node->needed)
522 {
d8a07487 523 error ("inline clone is needed");
b20996ff
JH
524 error_found = true;
525 }
e33c6cd6
MJ
526 for (e = node->indirect_calls; e; e = e->next_callee)
527 {
528 if (e->aux)
529 {
530 error ("aux field set for indirect edge from %s",
531 identifier_to_locale (cgraph_node_name (e->caller)));
532 error_found = true;
533 }
534 if (!e->indirect_unknown_callee
535 || !e->indirect_info)
536 {
537 error ("An indirect edge from %s is not marked as indirect or has "
538 "associated indirect_info, the corresponding statement is: ",
539 identifier_to_locale (cgraph_node_name (e->caller)));
89843f5d 540 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
e33c6cd6
MJ
541 error_found = true;
542 }
543 }
18c6ada9
JH
544 for (e = node->callers; e; e = e->next_caller)
545 {
02ec6988
MJ
546 if (verify_edge_count_and_frequency (e))
547 error_found = true;
18c6ada9
JH
548 if (!e->inline_failed)
549 {
550 if (node->global.inlined_to
551 != (e->caller->global.inlined_to
552 ? e->caller->global.inlined_to : e->caller))
553 {
ab532386 554 error ("inlined_to pointer is wrong");
18c6ada9
JH
555 error_found = true;
556 }
557 if (node->callers->next_caller)
558 {
ab532386 559 error ("multiple inline callers");
18c6ada9
JH
560 error_found = true;
561 }
562 }
563 else
564 if (node->global.inlined_to)
565 {
ab532386 566 error ("inlined_to pointer set for noninline callers");
18c6ada9
JH
567 error_found = true;
568 }
569 }
02ec6988
MJ
570 for (e = node->indirect_calls; e; e = e->next_callee)
571 if (verify_edge_count_and_frequency (e))
572 error_found = true;
18c6ada9
JH
573 if (!node->callers && node->global.inlined_to)
574 {
95a52ebb 575 error ("inlined_to pointer is set but no predecessors found");
18c6ada9
JH
576 error_found = true;
577 }
578 if (node->global.inlined_to == node)
579 {
ab532386 580 error ("inlined_to pointer refers to itself");
18c6ada9
JH
581 error_found = true;
582 }
583
62ecfeb8 584 if (!cgraph_get_node (node->decl))
18c6ada9 585 {
69fb1284 586 error ("node not found in cgraph_hash");
18c6ada9
JH
587 error_found = true;
588 }
c22cacf3 589
9187e02d
JH
590 if (node->clone_of)
591 {
592 struct cgraph_node *n;
593 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
594 if (n == node)
595 break;
596 if (!n)
597 {
598 error ("node has wrong clone_of");
599 error_found = true;
600 }
601 }
602 if (node->clones)
603 {
604 struct cgraph_node *n;
605 for (n = node->clones; n; n = n->next_sibling_clone)
606 if (n->clone_of != node)
607 break;
608 if (n)
609 {
610 error ("node has wrong clone list");
611 error_found = true;
612 }
613 }
614 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
615 {
616 error ("node is in clone list but it is not clone");
617 error_found = true;
618 }
619 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
620 {
621 error ("node has wrong prev_clone pointer");
622 error_found = true;
623 }
624 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
625 {
626 error ("double linked list of clones corrupted");
627 error_found = true;
628 }
78eaf7bf
MJ
629 if (node->same_comdat_group)
630 {
631 struct cgraph_node *n = node->same_comdat_group;
632
633 if (!DECL_ONE_ONLY (node->decl))
634 {
635 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
636 error_found = true;
637 }
638 if (n == node)
639 {
640 error ("node is alone in a comdat group");
641 error_found = true;
642 }
643 do
644 {
645 if (!n->same_comdat_group)
646 {
647 error ("same_comdat_group is not a circular list");
648 error_found = true;
649 break;
650 }
651 n = n->same_comdat_group;
652 }
653 while (n != node);
654 }
9187e02d 655
39e2db00
JH
656 if (node->analyzed && node->alias)
657 {
658 bool ref_found = false;
659 int i;
660 struct ipa_ref *ref;
661
662 if (node->callees)
663 {
664 error ("Alias has call edges");
665 error_found = true;
666 }
667 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
668 if (ref->use != IPA_REF_ALIAS)
669 {
670 error ("Alias has non-alias refernece");
671 error_found = true;
672 }
673 else if (ref_found)
674 {
675 error ("Alias has more than one alias reference");
676 error_found = true;
677 }
678 else
679 ref_found = true;
680 if (!ref_found)
681 {
682 error ("Analyzed alias has no reference");
683 error_found = true;
684 }
685 }
c47d0034
JH
686 if (node->analyzed && node->thunk.thunk_p)
687 {
688 if (!node->callees)
689 {
690 error ("No edge out of thunk node");
691 error_found = true;
692 }
693 else if (node->callees->next_callee)
694 {
695 error ("More than one edge out of thunk node");
696 error_found = true;
697 }
698 if (gimple_has_body_p (node->decl))
699 {
700 error ("Thunk is not supposed to have body");
701 error_found = true;
702 }
703 }
704 else if (node->analyzed && gimple_has_body_p (node->decl)
705 && !TREE_ASM_WRITTEN (node->decl)
706 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
707 && !flag_wpa)
18c6ada9 708 {
e21aff8a
SB
709 if (this_cfun->cfg)
710 {
711 /* The nodes we're interested in are never shared, so walk
712 the tree ignoring duplicates. */
2dee695b 713 struct pointer_set_t *visited_nodes = pointer_set_create ();
e21aff8a
SB
714 /* Reach the trees by walking over the CFG, and note the
715 enclosing basic-blocks in the call edges. */
716 FOR_EACH_BB_FN (this_block, this_cfun)
726a989a
RB
717 for (gsi = gsi_start_bb (this_block);
718 !gsi_end_p (gsi);
719 gsi_next (&gsi))
e0704a46 720 {
726a989a 721 gimple stmt = gsi_stmt (gsi);
e33c6cd6 722 if (is_gimple_call (stmt))
e0704a46
JH
723 {
724 struct cgraph_edge *e = cgraph_edge (node, stmt);
e33c6cd6 725 tree decl = gimple_call_fndecl (stmt);
e0704a46
JH
726 if (e)
727 {
728 if (e->aux)
729 {
ab532386 730 error ("shared call_stmt:");
89843f5d 731 cgraph_debug_gimple_stmt (this_cfun, stmt);
e0704a46
JH
732 error_found = true;
733 }
e33c6cd6 734 if (!e->indirect_unknown_callee)
6744a6ab 735 {
81361831 736 if (verify_edge_corresponds_to_fndecl (e, decl))
e33c6cd6
MJ
737 {
738 error ("edge points to wrong declaration:");
739 debug_tree (e->callee->decl);
740 fprintf (stderr," Instead of:");
741 debug_tree (decl);
742 error_found = true;
743 }
6744a6ab 744 }
e33c6cd6 745 else if (decl)
e0704a46 746 {
e33c6cd6
MJ
747 error ("an indirect edge with unknown callee "
748 "corresponding to a call_stmt with "
749 "a known declaration:");
47cb0d7d 750 error_found = true;
89843f5d 751 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
e0704a46
JH
752 }
753 e->aux = (void *)1;
754 }
e33c6cd6 755 else if (decl)
e0704a46 756 {
ab532386 757 error ("missing callgraph edge for call stmt:");
89843f5d 758 cgraph_debug_gimple_stmt (this_cfun, stmt);
e0704a46
JH
759 error_found = true;
760 }
761 }
762 }
e21aff8a 763 pointer_set_destroy (visited_nodes);
e21aff8a
SB
764 }
765 else
766 /* No CFG available?! */
767 gcc_unreachable ();
768
18c6ada9
JH
769 for (e = node->callees; e; e = e->next_callee)
770 {
e33c6cd6 771 if (!e->aux)
18c6ada9 772 {
ab532386 773 error ("edge %s->%s has no corresponding call_stmt",
4f1e4960
JM
774 identifier_to_locale (cgraph_node_name (e->caller)),
775 identifier_to_locale (cgraph_node_name (e->callee)));
89843f5d 776 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
18c6ada9
JH
777 error_found = true;
778 }
779 e->aux = 0;
780 }
e33c6cd6
MJ
781 for (e = node->indirect_calls; e; e = e->next_callee)
782 {
783 if (!e->aux)
784 {
785 error ("an indirect edge from %s has no corresponding call_stmt",
786 identifier_to_locale (cgraph_node_name (e->caller)));
89843f5d 787 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
e33c6cd6
MJ
788 error_found = true;
789 }
790 e->aux = 0;
791 }
18c6ada9
JH
792 }
793 if (error_found)
794 {
795 dump_cgraph_node (stderr, node);
ab532386 796 internal_error ("verify_cgraph_node failed");
18c6ada9
JH
797 }
798 timevar_pop (TV_CGRAPH_VERIFY);
799}
800
801/* Verify whole cgraph structure. */
24e47c76 802DEBUG_FUNCTION void
18c6ada9
JH
803verify_cgraph (void)
804{
805 struct cgraph_node *node;
806
1da2ed5f 807 if (seen_error ())
89480522
JH
808 return;
809
18c6ada9
JH
810 for (node = cgraph_nodes; node; node = node->next)
811 verify_cgraph_node (node);
812}
813
474eccc6
ILT
814/* Output all asm statements we have stored up to be output. */
815
816static void
817cgraph_output_pending_asms (void)
818{
819 struct cgraph_asm_node *can;
820
1da2ed5f 821 if (seen_error ())
474eccc6
ILT
822 return;
823
824 for (can = cgraph_asm_nodes; can; can = can->next)
825 assemble_asm (can->asm_str);
826 cgraph_asm_nodes = NULL;
827}
828
e767b5be 829/* Analyze the function scheduled to be output. */
322dd859 830void
e767b5be
JH
831cgraph_analyze_function (struct cgraph_node *node)
832{
a406865a 833 tree save = current_function_decl;
e767b5be
JH
834 tree decl = node->decl;
835
39e2db00
JH
836 if (node->alias && node->thunk.alias)
837 {
838 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
5ee770bf
JH
839 struct cgraph_node *n;
840
841 for (n = tgt; n && n->alias;
842 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
843 if (n == node)
844 {
845 error ("function %q+D part of alias cycle", node->decl);
846 node->alias = false;
847 return;
848 }
39e2db00
JH
849 if (!VEC_length (ipa_ref_t, node->ref_list.references))
850 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
851 if (node->same_body_alias)
852 {
853 DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
854 DECL_DECLARED_INLINE_P (node->decl)
855 = DECL_DECLARED_INLINE_P (node->thunk.alias);
856 DECL_DISREGARD_INLINE_LIMITS (node->decl)
857 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
858 }
859
860 /* Fixup visibility nonsences C++ frontend produce on same body aliases. */
861 if (TREE_PUBLIC (node->decl) && node->same_body_alias)
862 {
863 DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
ee6f1177 864 if (DECL_ONE_ONLY (node->thunk.alias))
39e2db00 865 {
ee6f1177 866 DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
39e2db00
JH
867 DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
868 if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
869 {
870 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
871 node->same_comdat_group = tgt;
872 if (!tgt->same_comdat_group)
873 tgt->same_comdat_group = node;
874 else
875 {
876 struct cgraph_node *n;
877 for (n = tgt->same_comdat_group;
878 n->same_comdat_group != tgt;
879 n = n->same_comdat_group)
880 ;
881 n->same_comdat_group = node;
882 }
883 }
884 }
885 }
886 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
887 if (node->address_taken)
888 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
889 if (cgraph_decide_is_function_needed (node, node->decl))
890 cgraph_mark_needed_node (node);
891 }
892 else if (node->thunk.thunk_p)
c47d0034
JH
893 {
894 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
895 NULL, 0, CGRAPH_FREQ_BASE);
896 }
897 else
898 {
899 current_function_decl = decl;
900 push_cfun (DECL_STRUCT_FUNCTION (decl));
a406865a 901
c47d0034 902 assign_assembler_name_if_neeeded (node->decl);
0e0a1359 903
c47d0034
JH
904 /* Make sure to gimplify bodies only once. During analyzing a
905 function we lower it, which will require gimplified nested
906 functions, so we can end up here with an already gimplified
907 body. */
908 if (!gimple_body (decl))
909 gimplify_function_tree (decl);
910 dump_function (TDI_generic, decl);
a406865a 911
c47d0034
JH
912 cgraph_lower_function (node);
913 pop_cfun ();
914 }
6a84c098 915 node->analyzed = true;
e767b5be 916
a406865a 917 current_function_decl = save;
e767b5be
JH
918}
919
39e2db00
JH
920/* C++ frontend produce same body aliases all over the place, even before PCH
921 gets streamed out. It relies on us linking the aliases with their function
922 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
923 first produce aliases without links, but once C++ FE is sure he won't sream
924 PCH we build the links via this function. */
925
926void
927cgraph_process_same_body_aliases (void)
928{
929 struct cgraph_node *node;
930 for (node = cgraph_nodes; node; node = node->next)
931 if (node->same_body_alias
932 && !VEC_length (ipa_ref_t, node->ref_list.references))
933 {
934 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
935 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
936 }
937 same_body_aliases_done = true;
938}
939
768e3c60
RG
940/* Process attributes common for vars and functions. */
941
942static void
943process_common_attributes (tree decl)
944{
945 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
946
947 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
948 {
949 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
950 "%<weakref%> attribute should be accompanied with"
951 " an %<alias%> attribute");
952 DECL_WEAK (decl) = 0;
779d4b91
JH
953 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
954 DECL_ATTRIBUTES (decl));
768e3c60
RG
955 }
956}
957
386b46cf
JH
958/* Look for externally_visible and used attributes and mark cgraph nodes
959 accordingly.
960
961 We cannot mark the nodes at the point the attributes are processed (in
962 handle_*_attribute) because the copy of the declarations available at that
963 point may not be canonical. For example, in:
964
965 void f();
966 void f() __attribute__((used));
967
968 the declaration we see in handle_used_attribute will be the second
969 declaration -- but the front end will subsequently merge that declaration
970 with the original declaration and discard the second declaration.
971
972 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
973
974 void f() {}
975 void f() __attribute__((externally_visible));
976
977 is valid.
978
979 So, we walk the nodes at the end of the translation unit, applying the
980 attributes at that point. */
981
982static void
983process_function_and_variable_attributes (struct cgraph_node *first,
8a4a83ed 984 struct varpool_node *first_var)
386b46cf
JH
985{
986 struct cgraph_node *node;
8a4a83ed 987 struct varpool_node *vnode;
386b46cf
JH
988
989 for (node = cgraph_nodes; node != first; node = node->next)
990 {
991 tree decl = node->decl;
b42186f1 992 if (DECL_PRESERVE_P (decl))
152464d2 993 cgraph_mark_needed_node (node);
9d602c59
KT
994 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
995 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
996 && TREE_PUBLIC (node->decl))
997 {
998 if (node->local.finalized)
999 cgraph_mark_needed_node (node);
1000 }
1001 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
386b46cf 1002 {
343d4b27 1003 if (! TREE_PUBLIC (node->decl))
c5d75364
MLI
1004 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1005 "%<externally_visible%>"
1006 " attribute have effect only on public objects");
b20996ff
JH
1007 else if (node->local.finalized)
1008 cgraph_mark_needed_node (node);
386b46cf 1009 }
779d4b91 1010 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
39e2db00 1011 && (node->local.finalized && !node->alias))
779d4b91
JH
1012 {
1013 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1014 "%<weakref%> attribute ignored"
1015 " because function is defined");
1016 DECL_WEAK (decl) = 0;
1017 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1018 DECL_ATTRIBUTES (decl));
1019 }
c9fc06dc
CB
1020
1021 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
1022 && !DECL_DECLARED_INLINE_P (decl)
1023 /* redefining extern inline function makes it DECL_UNINLINABLE. */
1024 && !DECL_UNINLINABLE (decl))
1025 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1026 "always_inline function might not be inlinable");
1027
768e3c60 1028 process_common_attributes (decl);
386b46cf 1029 }
8a4a83ed 1030 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
386b46cf
JH
1031 {
1032 tree decl = vnode->decl;
b42186f1 1033 if (DECL_PRESERVE_P (decl))
386b46cf 1034 {
a8289259 1035 vnode->force_output = true;
386b46cf 1036 if (vnode->finalized)
8a4a83ed 1037 varpool_mark_needed_node (vnode);
386b46cf 1038 }
9d602c59
KT
1039 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1040 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
9659ff6e 1041 && TREE_PUBLIC (vnode->decl))
9d602c59
KT
1042 {
1043 if (vnode->finalized)
1044 varpool_mark_needed_node (vnode);
1045 }
1046 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
386b46cf 1047 {
343d4b27 1048 if (! TREE_PUBLIC (vnode->decl))
c5d75364
MLI
1049 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1050 "%<externally_visible%>"
1051 " attribute have effect only on public objects");
b20996ff
JH
1052 else if (vnode->finalized)
1053 varpool_mark_needed_node (vnode);
386b46cf 1054 }
779d4b91
JH
1055 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1056 && vnode->finalized
1057 && DECL_INITIAL (decl))
1058 {
1059 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1060 "%<weakref%> attribute ignored"
1061 " because variable is initialized");
1062 DECL_WEAK (decl) = 0;
1063 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1064 DECL_ATTRIBUTES (decl));
1065 }
768e3c60 1066 process_common_attributes (decl);
386b46cf
JH
1067 }
1068}
1069
151e6f24
JH
1070/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1071 each reachable functions) and build cgraph.
1072 The function can be called multiple times after inserting new nodes
88512ba0 1073 into beginning of queue. Just the new part of queue is re-scanned then. */
1c4a429a 1074
151e6f24
JH
1075static void
1076cgraph_analyze_functions (void)
1c4a429a 1077{
cd9c7bd2 1078 /* Keep track of already processed nodes when called multiple times for
aabcd309 1079 intermodule optimization. */
cd9c7bd2 1080 static struct cgraph_node *first_analyzed;
61e00a5e 1081 struct cgraph_node *first_processed = first_analyzed;
8a4a83ed 1082 static struct varpool_node *first_analyzed_var;
151e6f24 1083 struct cgraph_node *node, *next;
1c4a429a 1084
1389294c 1085 bitmap_obstack_initialize (NULL);
61e00a5e
JH
1086 process_function_and_variable_attributes (first_processed,
1087 first_analyzed_var);
1088 first_processed = cgraph_nodes;
8a4a83ed
JH
1089 first_analyzed_var = varpool_nodes;
1090 varpool_analyze_pending_decls ();
a194aa56 1091 if (cgraph_dump_file)
1c4a429a 1092 {
7d82fe7c 1093 fprintf (cgraph_dump_file, "Initial entry points:");
cd9c7bd2 1094 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
39ecc018 1095 if (node->needed)
a194aa56
JH
1096 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1097 fprintf (cgraph_dump_file, "\n");
1c4a429a 1098 }
151e6f24 1099 cgraph_process_new_functions ();
1c4a429a 1100
7660e67e
SB
1101 /* Propagate reachability flag and lower representation of all reachable
1102 functions. In the future, lowering will introduce new functions and
1103 new entry points on the way (by template instantiation and virtual
1104 method table generation for instance). */
1668aabc 1105 while (cgraph_nodes_queue)
1c4a429a 1106 {
e767b5be 1107 struct cgraph_edge *edge;
1668aabc
JH
1108 tree decl = cgraph_nodes_queue->decl;
1109
1110 node = cgraph_nodes_queue;
8bd87c4e 1111 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
18c6ada9 1112 node->next_needed = NULL;
1c4a429a 1113
cd4dea62 1114 /* ??? It is possible to create extern inline function and later using
9d203871 1115 weak alias attribute to kill its body. See
cd4dea62 1116 gcc.c-torture/compile/20011119-1.c */
c47d0034 1117 if (!DECL_STRUCT_FUNCTION (decl)
39e2db00 1118 && (!node->alias || !node->thunk.alias)
c47d0034 1119 && !node->thunk.thunk_p)
d71cc23f
JH
1120 {
1121 cgraph_reset_node (node);
b125ad45 1122 node->local.redefined_extern_inline = true;
d71cc23f
JH
1123 continue;
1124 }
cd4dea62 1125
d7f09764
DN
1126 if (!node->analyzed)
1127 cgraph_analyze_function (node);
8dafba3c 1128
1c4a429a 1129 for (edge = node->callees; edge; edge = edge->next_callee)
e767b5be 1130 if (!edge->callee->reachable)
8dafba3c 1131 cgraph_mark_reachable_node (edge->callee);
c47d0034
JH
1132 for (edge = node->callers; edge; edge = edge->next_caller)
1133 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1134 cgraph_mark_reachable_node (edge->caller);
8dafba3c 1135
b66887e4
JJ
1136 if (node->same_comdat_group)
1137 {
1138 for (next = node->same_comdat_group;
1139 next != node;
1140 next = next->same_comdat_group)
1141 cgraph_mark_reachable_node (next);
1142 }
1143
6b20f353
DS
1144 /* If decl is a clone of an abstract function, mark that abstract
1145 function so that we don't release its body. The DECL_INITIAL() of that
581985d7
MJ
1146 abstract function declaration will be later needed to output debug
1147 info. */
6b20f353
DS
1148 if (DECL_ABSTRACT_ORIGIN (decl))
1149 {
581985d7
MJ
1150 struct cgraph_node *origin_node;
1151 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
6b20f353
DS
1152 origin_node->abstract_and_needed = true;
1153 }
1154
61e00a5e
JH
1155 /* We finalize local static variables during constructing callgraph
1156 edges. Process their attributes too. */
1157 process_function_and_variable_attributes (first_processed,
1158 first_analyzed_var);
1159 first_processed = cgraph_nodes;
8a4a83ed
JH
1160 first_analyzed_var = varpool_nodes;
1161 varpool_analyze_pending_decls ();
151e6f24 1162 cgraph_process_new_functions ();
1c4a429a 1163 }
8dafba3c 1164
564738df 1165 /* Collect entry points to the unit. */
a194aa56 1166 if (cgraph_dump_file)
1668aabc 1167 {
7d82fe7c 1168 fprintf (cgraph_dump_file, "Unit entry points:");
cd9c7bd2 1169 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
39ecc018 1170 if (node->needed)
a194aa56 1171 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
7d82fe7c 1172 fprintf (cgraph_dump_file, "\n\nInitial ");
e767b5be 1173 dump_cgraph (cgraph_dump_file);
df7705b1 1174 dump_varpool (cgraph_dump_file);
1668aabc 1175 }
7660e67e 1176
a194aa56
JH
1177 if (cgraph_dump_file)
1178 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1c4a429a 1179
96fc428c 1180 for (node = cgraph_nodes; node != first_analyzed; node = next)
1c4a429a
JH
1181 {
1182 tree decl = node->decl;
96fc428c 1183 next = node->next;
1c4a429a 1184
c47d0034 1185 if (node->local.finalized && !gimple_has_body_p (decl)
39e2db00 1186 && (!node->alias || !node->thunk.alias)
c47d0034 1187 && !node->thunk.thunk_p)
c22cacf3 1188 cgraph_reset_node (node);
d71cc23f 1189
c47d0034 1190 if (!node->reachable
39e2db00
JH
1191 && (gimple_has_body_p (decl) || node->thunk.thunk_p
1192 || (node->alias && node->thunk.alias)))
1c4a429a 1193 {
a194aa56
JH
1194 if (cgraph_dump_file)
1195 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
18c6ada9 1196 cgraph_remove_node (node);
d71cc23f 1197 continue;
1c4a429a 1198 }
9b0436b7
JH
1199 else
1200 node->next_needed = NULL;
c47d0034 1201 gcc_assert (!node->local.finalized || node->thunk.thunk_p
39e2db00 1202 || node->alias
c47d0034 1203 || gimple_has_body_p (decl));
d71cc23f 1204 gcc_assert (node->analyzed == node->local.finalized);
1c4a429a 1205 }
a194aa56 1206 if (cgraph_dump_file)
7d82fe7c
KC
1207 {
1208 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1209 dump_cgraph (cgraph_dump_file);
df7705b1 1210 dump_varpool (cgraph_dump_file);
7d82fe7c 1211 }
1389294c 1212 bitmap_obstack_release (NULL);
cd9c7bd2 1213 first_analyzed = cgraph_nodes;
1c4a429a 1214 ggc_collect ();
151e6f24
JH
1215}
1216
85ce9375
JH
1217/* Translate the ugly representation of aliases as alias pairs into nice
1218 representation in callgraph. We don't handle all cases yet,
1219 unforutnately. */
1220
1221static void
1222handle_alias_pairs (void)
1223{
1224 alias_pair *p;
1225 unsigned i;
1226 struct cgraph_node *target_node;
1227 struct cgraph_node *src_node;
cd35bcf7 1228 struct varpool_node *target_vnode;
85ce9375
JH
1229
1230 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1231 {
1232 if (TREE_CODE (p->decl) == FUNCTION_DECL
85ce9375
JH
1233 && (target_node = cgraph_node_for_asm (p->target)) != NULL)
1234 {
1235 src_node = cgraph_get_node (p->decl);
1236 if (src_node && src_node->local.finalized)
1237 cgraph_reset_node (src_node);
1238 /* Normally EXTERNAL flag is used to mark external inlines,
1239 however for aliases it seems to be allowed to use it w/o
1240 any meaning. See gcc.dg/attr-alias-3.c
1241 However for weakref we insist on EXTERNAL flag being set.
1242 See gcc.dg/attr-alias-5.c */
1243 if (DECL_EXTERNAL (p->decl))
c9552bff
JH
1244 DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
1245 DECL_ATTRIBUTES (p->decl)) != NULL;
85ce9375
JH
1246 cgraph_create_function_alias (p->decl, target_node->decl);
1247 VEC_unordered_remove (alias_pair, alias_pairs, i);
1248 }
cd35bcf7 1249 else if (TREE_CODE (p->decl) == VAR_DECL
cd35bcf7
JH
1250 && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
1251 {
1252 /* Normally EXTERNAL flag is used to mark external inlines,
1253 however for aliases it seems to be allowed to use it w/o
1254 any meaning. See gcc.dg/attr-alias-3.c
1255 However for weakref we insist on EXTERNAL flag being set.
1256 See gcc.dg/attr-alias-5.c */
1257 if (DECL_EXTERNAL (p->decl))
c9552bff
JH
1258 DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
1259 DECL_ATTRIBUTES (p->decl)) != NULL;
cd35bcf7
JH
1260 varpool_create_variable_alias (p->decl, target_vnode->decl);
1261 VEC_unordered_remove (alias_pair, alias_pairs, i);
1262 }
25e2c40d
JH
1263 /* Weakrefs with target not defined in current unit are easy to handle; they
1264 behave just as external variables except we need to note the alias flag
1265 to later output the weakref pseudo op into asm file. */
1266 else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
1267 && (TREE_CODE (p->decl) == FUNCTION_DECL
1268 ? (varpool_node_for_asm (p->target) == NULL)
1269 : (cgraph_node_for_asm (p->target) == NULL)))
1270 {
1271 if (TREE_CODE (p->decl) == FUNCTION_DECL)
1272 cgraph_get_create_node (p->decl)->alias = true;
1273 else
1274 varpool_get_node (p->decl)->alias = true;
1275 DECL_EXTERNAL (p->decl) = 1;
1276 VEC_unordered_remove (alias_pair, alias_pairs, i);
1277 }
85ce9375
JH
1278 else
1279 {
1280 if (dump_file)
1281 fprintf (dump_file, "Unhandled alias %s->%s\n",
1282 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
1283 IDENTIFIER_POINTER (p->target));
1284
1285 i++;
1286 }
1287 }
1288}
1289
5f1a9ebb 1290
151e6f24
JH
1291/* Analyze the whole compilation unit once it is parsed completely. */
1292
1293void
1294cgraph_finalize_compilation_unit (void)
1295{
90097c67
RG
1296 timevar_push (TV_CGRAPH);
1297
47c79d56
DN
1298 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1299 if (flag_lto)
1300 lto_streamer_hooks_init ();
1301
0038d4e0
MM
1302 /* If we're here there's no current function anymore. Some frontends
1303 are lazy in clearing these. */
1304 current_function_decl = NULL;
1305 set_cfun (NULL);
1306
a406865a
RG
1307 /* Do not skip analyzing the functions if there were errors, we
1308 miss diagnostics for following functions otherwise. */
151e6f24 1309
5f1a9ebb 1310 /* Emit size functions we didn't inline. */
f82a627c 1311 finalize_size_functions ();
5f1a9ebb 1312
90097c67
RG
1313 /* Mark alias targets necessary and emit diagnostics. */
1314 finish_aliases_1 ();
85ce9375 1315 handle_alias_pairs ();
90097c67 1316
151e6f24
JH
1317 if (!quiet_flag)
1318 {
1319 fprintf (stderr, "\nAnalyzing compilation unit\n");
1320 fflush (stderr);
1321 }
1322
deced1e2
XDL
1323 if (flag_dump_passes)
1324 dump_passes ();
1325
90097c67
RG
1326 /* Gimplify and lower all functions, compute reachability and
1327 remove unreachable nodes. */
1328 cgraph_analyze_functions ();
1329
5f1a9ebb
RG
1330 /* Mark alias targets necessary and emit diagnostics. */
1331 finish_aliases_1 ();
85ce9375 1332 handle_alias_pairs ();
5f1a9ebb 1333
90097c67 1334 /* Gimplify and lower thunks. */
151e6f24 1335 cgraph_analyze_functions ();
a406865a 1336
90097c67 1337 /* Finally drive the pass manager. */
a406865a 1338 cgraph_optimize ();
90097c67
RG
1339
1340 timevar_pop (TV_CGRAPH);
1c4a429a 1341}
3baf459d
DN
1342
1343
1c4a429a
JH
1344/* Figure out what functions we want to assemble. */
1345
1346static void
db0e878d 1347cgraph_mark_functions_to_output (void)
1c4a429a
JH
1348{
1349 struct cgraph_node *node;
b66887e4
JJ
1350#ifdef ENABLE_CHECKING
1351 bool check_same_comdat_groups = false;
1352
1353 for (node = cgraph_nodes; node; node = node->next)
1354 gcc_assert (!node->process);
1355#endif
1c4a429a 1356
1c4a429a
JH
1357 for (node = cgraph_nodes; node; node = node->next)
1358 {
1359 tree decl = node->decl;
b58b1157 1360 struct cgraph_edge *e;
c22cacf3 1361
b66887e4
JJ
1362 gcc_assert (!node->process || node->same_comdat_group);
1363 if (node->process)
1364 continue;
b58b1157
JH
1365
1366 for (e = node->callers; e; e = e->next_caller)
dc0bfe6a 1367 if (e->inline_failed)
b58b1157 1368 break;
1c4a429a 1369
7660e67e
SB
1370 /* We need to output all local functions that are used and not
1371 always inlined, as well as those that are reachable from
1372 outside the current compilation unit. */
39ecc018 1373 if (node->analyzed
c47d0034 1374 && !node->thunk.thunk_p
39e2db00 1375 && !node->alias
18c6ada9 1376 && !node->global.inlined_to
508e4757 1377 && (!cgraph_only_called_directly_p (node)
39e2db00
JH
1378 || ((e || ipa_ref_has_aliases_p (&node->ref_list))
1379 && node->reachable))
6de9cd9a 1380 && !TREE_ASM_WRITTEN (decl)
1c4a429a 1381 && !DECL_EXTERNAL (decl))
b66887e4
JJ
1382 {
1383 node->process = 1;
1384 if (node->same_comdat_group)
1385 {
1386 struct cgraph_node *next;
1387 for (next = node->same_comdat_group;
1388 next != node;
1389 next = next->same_comdat_group)
39e2db00 1390 if (!next->thunk.thunk_p && !next->alias)
c47d0034 1391 next->process = 1;
b66887e4
JJ
1392 }
1393 }
1394 else if (node->same_comdat_group)
1395 {
1396#ifdef ENABLE_CHECKING
1397 check_same_comdat_groups = true;
1398#endif
1399 }
341c100f 1400 else
1a2caa7a
NS
1401 {
1402 /* We should've reclaimed all functions that are not needed. */
1403#ifdef ENABLE_CHECKING
726a989a 1404 if (!node->global.inlined_to
39ecc018 1405 && gimple_has_body_p (decl)
a837268b
JH
1406 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1407 are inside partition, we can end up not removing the body since we no longer
1408 have analyzed node pointing to it. */
1409 && !node->in_other_partition
39e2db00 1410 && !node->alias
1a2caa7a
NS
1411 && !DECL_EXTERNAL (decl))
1412 {
1413 dump_cgraph_node (stderr, node);
1414 internal_error ("failed to reclaim unneeded function");
1415 }
1416#endif
726a989a 1417 gcc_assert (node->global.inlined_to
39ecc018 1418 || !gimple_has_body_p (decl)
a837268b 1419 || node->in_other_partition
1a2caa7a
NS
1420 || DECL_EXTERNAL (decl));
1421
1422 }
c22cacf3 1423
18d13f34 1424 }
b66887e4
JJ
1425#ifdef ENABLE_CHECKING
1426 if (check_same_comdat_groups)
1427 for (node = cgraph_nodes; node; node = node->next)
1428 if (node->same_comdat_group && !node->process)
1429 {
1430 tree decl = node->decl;
1431 if (!node->global.inlined_to
1432 && gimple_has_body_p (decl)
a837268b
JH
1433 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1434 are inside partition, we can end up not removing the body since we no longer
1435 have analyzed node pointing to it. */
1436 && !node->in_other_partition
b66887e4
JJ
1437 && !DECL_EXTERNAL (decl))
1438 {
1439 dump_cgraph_node (stderr, node);
39e2db00 1440 internal_error ("failed to reclaim unneeded functionin same comdat group");
b66887e4
JJ
1441 }
1442 }
1443#endif
18d13f34
JH
1444}
1445
6744a6ab
JH
1446/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1447 in lowered gimple form.
1448
1449 Set current_function_decl and cfun to newly constructed empty function body.
1450 return basic block in the function body. */
1451
1452static basic_block
1453init_lowered_empty_function (tree decl)
1454{
1455 basic_block bb;
1456
1457 current_function_decl = decl;
1458 allocate_struct_function (decl, false);
1459 gimple_register_cfg_hooks ();
1460 init_empty_tree_cfg ();
1461 init_tree_ssa (cfun);
1462 init_ssa_operands ();
1463 cfun->gimple_df->in_ssa_p = true;
1464 DECL_INITIAL (decl) = make_node (BLOCK);
1465
1466 DECL_SAVED_TREE (decl) = error_mark_node;
1467 cfun->curr_properties |=
1468 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
febb1302 1469 PROP_ssa | PROP_gimple_any);
6744a6ab
JH
1470
1471 /* Create BB for body of the function and connect it properly. */
1472 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
4f9c574a
DV
1473 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1474 make_edge (bb, EXIT_BLOCK_PTR, 0);
6744a6ab
JH
1475
1476 return bb;
1477}
1478
1479/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1480 offset indicated by VIRTUAL_OFFSET, if that is
1481 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1482 zero for a result adjusting thunk. */
1483
1484static tree
1485thunk_adjust (gimple_stmt_iterator * bsi,
1486 tree ptr, bool this_adjusting,
1487 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1488{
1489 gimple stmt;
1490 tree ret;
1491
313333a6
RG
1492 if (this_adjusting
1493 && fixed_offset != 0)
6744a6ab 1494 {
5d49b6a7
RG
1495 stmt = gimple_build_assign
1496 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1497 ptr,
1498 fixed_offset));
6744a6ab
JH
1499 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1500 }
1501
1502 /* If there's a virtual offset, look up that value in the vtable and
1503 adjust the pointer again. */
1504 if (virtual_offset)
1505 {
1506 tree vtabletmp;
1507 tree vtabletmp2;
1508 tree vtabletmp3;
6744a6ab
JH
1509
1510 if (!vtable_entry_type)
1511 {
1512 tree vfunc_type = make_node (FUNCTION_TYPE);
1513 TREE_TYPE (vfunc_type) = integer_type_node;
1514 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1515 layout_type (vfunc_type);
1516
1517 vtable_entry_type = build_pointer_type (vfunc_type);
1518 }
1519
1520 vtabletmp =
1521 create_tmp_var (build_pointer_type
1522 (build_pointer_type (vtable_entry_type)), "vptr");
1523
1524 /* The vptr is always at offset zero in the object. */
1525 stmt = gimple_build_assign (vtabletmp,
1526 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1527 ptr));
1528 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1529 mark_symbols_for_renaming (stmt);
1530 find_referenced_vars_in (stmt);
1531
1532 /* Form the vtable address. */
1533 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1534 "vtableaddr");
1535 stmt = gimple_build_assign (vtabletmp2,
70f34814 1536 build_simple_mem_ref (vtabletmp));
6744a6ab
JH
1537 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1538 mark_symbols_for_renaming (stmt);
1539 find_referenced_vars_in (stmt);
1540
1541 /* Find the entry with the vcall offset. */
1542 stmt = gimple_build_assign (vtabletmp2,
5d49b6a7
RG
1543 fold_build_pointer_plus_loc (input_location,
1544 vtabletmp2,
1545 virtual_offset));
6744a6ab
JH
1546 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1547
1548 /* Get the offset itself. */
1549 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1550 "vcalloffset");
1551 stmt = gimple_build_assign (vtabletmp3,
70f34814 1552 build_simple_mem_ref (vtabletmp2));
6744a6ab
JH
1553 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1554 mark_symbols_for_renaming (stmt);
1555 find_referenced_vars_in (stmt);
1556
6744a6ab 1557 /* Adjust the `this' pointer. */
0d82a1c8
RG
1558 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1559 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1560 GSI_CONTINUE_LINKING);
6744a6ab
JH
1561 }
1562
313333a6
RG
1563 if (!this_adjusting
1564 && fixed_offset != 0)
6744a6ab
JH
1565 /* Adjust the pointer by the constant. */
1566 {
1567 tree ptrtmp;
1568
1569 if (TREE_CODE (ptr) == VAR_DECL)
1570 ptrtmp = ptr;
1571 else
1572 {
1573 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1574 stmt = gimple_build_assign (ptrtmp, ptr);
1575 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1576 mark_symbols_for_renaming (stmt);
1577 find_referenced_vars_in (stmt);
1578 }
5d49b6a7
RG
1579 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1580 ptrtmp, fixed_offset);
6744a6ab
JH
1581 }
1582
1583 /* Emit the statement and gimplify the adjustment expression. */
1584 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1585 stmt = gimple_build_assign (ret, ptr);
1586 mark_symbols_for_renaming (stmt);
1587 find_referenced_vars_in (stmt);
1588 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1589
1590 return ret;
1591}
1592
1593/* Produce assembler for thunk NODE. */
1594
1595static void
1596assemble_thunk (struct cgraph_node *node)
1597{
1598 bool this_adjusting = node->thunk.this_adjusting;
1599 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1600 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1601 tree virtual_offset = NULL;
1602 tree alias = node->thunk.alias;
1603 tree thunk_fndecl = node->decl;
1604 tree a = DECL_ARGUMENTS (thunk_fndecl);
1605
1606 current_function_decl = thunk_fndecl;
1607
d06865bf
DK
1608 /* Ensure thunks are emitted in their correct sections. */
1609 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1610
6744a6ab
JH
1611 if (this_adjusting
1612 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1613 virtual_value, alias))
1614 {
1615 const char *fnname;
1616 tree fn_block;
4399cf59 1617 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
6744a6ab
JH
1618
1619 DECL_RESULT (thunk_fndecl)
1620 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
4399cf59 1621 RESULT_DECL, 0, restype);
15488554 1622 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
6744a6ab
JH
1623
1624 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1625 create one. */
1626 fn_block = make_node (BLOCK);
1627 BLOCK_VARS (fn_block) = a;
1628 DECL_INITIAL (thunk_fndecl) = fn_block;
1629 init_function_start (thunk_fndecl);
1630 cfun->is_thunk = 1;
1631 assemble_start_function (thunk_fndecl, fnname);
1632
1633 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1634 fixed_offset, virtual_value, alias);
1635
1636 assemble_end_function (thunk_fndecl, fnname);
1637 init_insn_lengths ();
1638 free_after_compilation (cfun);
1639 set_cfun (NULL);
1640 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
c47d0034
JH
1641 node->thunk.thunk_p = false;
1642 node->analyzed = false;
6744a6ab
JH
1643 }
1644 else
1645 {
1646 tree restype;
1647 basic_block bb, then_bb, else_bb, return_bb;
1648 gimple_stmt_iterator bsi;
1649 int nargs = 0;
1650 tree arg;
1651 int i;
1652 tree resdecl;
1653 tree restmp = NULL;
1654 VEC(tree, heap) *vargs;
1655
1656 gimple call;
1657 gimple ret;
1658
1659 DECL_IGNORED_P (thunk_fndecl) = 1;
1660 bitmap_obstack_initialize (NULL);
1661
1662 if (node->thunk.virtual_offset_p)
1663 virtual_offset = size_int (virtual_value);
1664
1665 /* Build the return declaration for the function. */
1666 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1667 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1668 {
1669 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1670 DECL_ARTIFICIAL (resdecl) = 1;
1671 DECL_IGNORED_P (resdecl) = 1;
1672 DECL_RESULT (thunk_fndecl) = resdecl;
1673 }
1674 else
1675 resdecl = DECL_RESULT (thunk_fndecl);
1676
1677 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1678
1679 bsi = gsi_start_bb (bb);
1680
1681 /* Build call to the function being thunked. */
1682 if (!VOID_TYPE_P (restype))
1683 {
1684 if (!is_gimple_reg_type (restype))
1685 {
1686 restmp = resdecl;
c021f10b 1687 add_local_decl (cfun, restmp);
6744a6ab
JH
1688 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1689 }
1690 else
1691 restmp = create_tmp_var_raw (restype, "retval");
1692 }
1693
910ad8de 1694 for (arg = a; arg; arg = DECL_CHAIN (arg))
6744a6ab
JH
1695 nargs++;
1696 vargs = VEC_alloc (tree, heap, nargs);
1697 if (this_adjusting)
1698 VEC_quick_push (tree, vargs,
1699 thunk_adjust (&bsi,
1700 a, 1, fixed_offset,
1701 virtual_offset));
1702 else
1703 VEC_quick_push (tree, vargs, a);
910ad8de 1704 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
6744a6ab
JH
1705 VEC_quick_push (tree, vargs, arg);
1706 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1707 VEC_free (tree, heap, vargs);
6744a6ab
JH
1708 gimple_call_set_from_thunk (call, true);
1709 if (restmp)
1710 gimple_call_set_lhs (call, restmp);
1711 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1712 mark_symbols_for_renaming (call);
1713 find_referenced_vars_in (call);
1714 update_stmt (call);
1715
1716 if (restmp && !this_adjusting)
1717 {
1124098b 1718 tree true_label = NULL_TREE;
6744a6ab
JH
1719
1720 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1721 {
1722 gimple stmt;
1723 /* If the return type is a pointer, we need to
1724 protect against NULL. We know there will be an
1725 adjustment, because that's why we're emitting a
1726 thunk. */
1727 then_bb = create_basic_block (NULL, (void *) 0, bb);
1728 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1729 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1730 remove_edge (single_succ_edge (bb));
1731 true_label = gimple_block_label (then_bb);
6744a6ab 1732 stmt = gimple_build_cond (NE_EXPR, restmp,
e8160c9a 1733 build_zero_cst (TREE_TYPE (restmp)),
6744a6ab
JH
1734 NULL_TREE, NULL_TREE);
1735 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1736 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1737 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1738 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1739 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1740 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1741 bsi = gsi_last_bb (then_bb);
1742 }
1743
1744 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1745 fixed_offset, virtual_offset);
1746 if (true_label)
1747 {
1748 gimple stmt;
1749 bsi = gsi_last_bb (else_bb);
e8160c9a
NF
1750 stmt = gimple_build_assign (restmp,
1751 build_zero_cst (TREE_TYPE (restmp)));
6744a6ab
JH
1752 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1753 bsi = gsi_last_bb (return_bb);
1754 }
1755 }
1756 else
1757 gimple_call_set_tail (call, true);
1758
1759 /* Build return value. */
1760 ret = gimple_build_return (restmp);
1761 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1762
1763 delete_unreachable_blocks ();
1764 update_ssa (TODO_update_ssa);
1765
6744a6ab
JH
1766 /* Since we want to emit the thunk, we explicitly mark its name as
1767 referenced. */
c47d0034
JH
1768 node->thunk.thunk_p = false;
1769 cgraph_node_remove_callees (node);
6744a6ab
JH
1770 cgraph_add_new_function (thunk_fndecl, true);
1771 bitmap_obstack_release (NULL);
1772 }
1773 current_function_decl = NULL;
1774}
1775
c47d0034 1776
39e2db00
JH
1777
1778/* Assemble thunks and aliases asociated to NODE. */
c47d0034
JH
1779
1780static void
39e2db00 1781assemble_thunks_and_aliases (struct cgraph_node *node)
c47d0034
JH
1782{
1783 struct cgraph_edge *e;
39e2db00
JH
1784 int i;
1785 struct ipa_ref *ref;
1786
c47d0034
JH
1787 for (e = node->callers; e;)
1788 if (e->caller->thunk.thunk_p)
1789 {
1790 struct cgraph_node *thunk = e->caller;
1791
1792 e = e->next_caller;
39e2db00 1793 assemble_thunks_and_aliases (thunk);
c47d0034
JH
1794 assemble_thunk (thunk);
1795 }
1796 else
1797 e = e->next_caller;
39e2db00
JH
1798 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1799 if (ref->use == IPA_REF_ALIAS)
1800 {
1801 struct cgraph_node *alias = ipa_ref_refering_node (ref);
42f833bc
JH
1802 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1803
1804 /* Force assemble_alias to really output the alias this time instead
1805 of buffering it in same alias pairs. */
1806 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
39e2db00
JH
1807 assemble_alias (alias->decl,
1808 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1809 assemble_thunks_and_aliases (alias);
42f833bc 1810 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
39e2db00 1811 }
c47d0034
JH
1812}
1813
1c4a429a 1814/* Expand function specified by NODE. */
7660e67e 1815
1c4a429a 1816static void
db0e878d 1817cgraph_expand_function (struct cgraph_node *node)
1c4a429a
JH
1818{
1819 tree decl = node->decl;
1820
18c6ada9 1821 /* We ought to not compile any inline clones. */
341c100f 1822 gcc_assert (!node->global.inlined_to);
18c6ada9 1823
7e8b322a 1824 announce_function (decl);
257eb6e3 1825 node->process = 0;
5806d9ac
JH
1826 gcc_assert (node->lowered);
1827
1828 /* Generate RTL for the body of DECL. */
1829 tree_rest_of_compilation (decl);
1830
1831 /* Make sure that BE didn't give up on compiling. */
1832 gcc_assert (TREE_ASM_WRITTEN (decl));
1833 current_function_decl = NULL;
85ad2ef5 1834 gcc_assert (!cgraph_preserve_function_body_p (node));
1bb7e8f8
JH
1835
1836 /* It would make a lot more sense to output thunks before function body to get more
1837 forward and lest backwarding jumps. This is however would need solving problem
1838 with comdats. See PR48668. Also aliases must come after function itself to
1839 make one pass assemblers, like one on AIX happy. See PR 50689.
1840 FIXME: Perhaps thunks should be move before function IFF they are not in comdat
1841 groups. */
1842 assemble_thunks_and_aliases (node);
39ecc018
JH
1843 cgraph_release_function_body (node);
1844 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1845 points to the dead function body. */
1846 cgraph_node_remove_callees (node);
6b02a499
JH
1847
1848 cgraph_function_flags_ready = true;
1c4a429a
JH
1849}
1850
18c6ada9 1851/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
b58b1157
JH
1852
1853bool
61a05df1 1854cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
b58b1157 1855{
18c6ada9
JH
1856 *reason = e->inline_failed;
1857 return !e->inline_failed;
b58b1157 1858}
18c6ada9 1859
6674a6ce 1860
6674a6ce 1861
db0e878d
AJ
1862/* Expand all functions that must be output.
1863
b58b1157
JH
1864 Attempt to topologically sort the nodes so function is output when
1865 all called functions are already assembled to allow data to be
a98ebe2e 1866 propagated across the callgraph. Use a stack to get smaller distance
d1a6adeb 1867 between a function and its callees (later we may choose to use a more
b58b1157
JH
1868 sophisticated algorithm for function reordering; we will likely want
1869 to use subsections to make the output functions appear in top-down
1870 order). */
1871
1872static void
a20af5b8 1873cgraph_expand_all_functions (void)
b58b1157
JH
1874{
1875 struct cgraph_node *node;
5ed6ace5 1876 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
f30cfcb1 1877 int order_pos, new_order_pos = 0;
b58b1157
JH
1878 int i;
1879
af8bca3c 1880 order_pos = ipa_reverse_postorder (order);
341c100f 1881 gcc_assert (order_pos == cgraph_n_nodes);
b58b1157 1882
1ae58c30 1883 /* Garbage collector may remove inline clones we eliminate during
18c6ada9
JH
1884 optimization. So we must be sure to not reference them. */
1885 for (i = 0; i < order_pos; i++)
257eb6e3 1886 if (order[i]->process)
18c6ada9
JH
1887 order[new_order_pos++] = order[i];
1888
1889 for (i = new_order_pos - 1; i >= 0; i--)
b58b1157
JH
1890 {
1891 node = order[i];
257eb6e3 1892 if (node->process)
b58b1157 1893 {
341c100f 1894 gcc_assert (node->reachable);
257eb6e3 1895 node->process = 0;
b58b1157
JH
1896 cgraph_expand_function (node);
1897 }
1898 }
f45e0ad1 1899 cgraph_process_new_functions ();
50674e96 1900
b58b1157 1901 free (order);
50674e96 1902
b58b1157
JH
1903}
1904
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,   /* Slot not occupied by any toplevel entity.  */
  ORDER_FUNCTION,        /* Slot holds a function (u.f).  */
  ORDER_VAR,             /* Slot holds a variable (u.v).  */
  ORDER_ASM              /* Slot holds a toplevel asm statement (u.a).  */
};

/* One entry in the order-indexed table built by cgraph_output_in_order;
   KIND selects which union member is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
1925
1926/* Output all functions, variables, and asm statements in the order
1927 according to their order fields, which is the order in which they
1928 appeared in the file. This implements -fno-toplevel-reorder. In
1929 this mode we may output functions and variables which don't really
1930 need to be output. */
1931
1932static void
1933cgraph_output_in_order (void)
1934{
1935 int max;
474eccc6
ILT
1936 struct cgraph_order_sort *nodes;
1937 int i;
1938 struct cgraph_node *pf;
8a4a83ed 1939 struct varpool_node *pv;
474eccc6
ILT
1940 struct cgraph_asm_node *pa;
1941
1942 max = cgraph_order;
33283dad 1943 nodes = XCNEWVEC (struct cgraph_order_sort, max);
474eccc6 1944
8a4a83ed 1945 varpool_analyze_pending_decls ();
474eccc6
ILT
1946
1947 for (pf = cgraph_nodes; pf; pf = pf->next)
1948 {
39e2db00 1949 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
474eccc6
ILT
1950 {
1951 i = pf->order;
1952 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1953 nodes[i].kind = ORDER_FUNCTION;
1954 nodes[i].u.f = pf;
1955 }
1956 }
1957
8a4a83ed 1958 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
474eccc6
ILT
1959 {
1960 i = pv->order;
1961 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1962 nodes[i].kind = ORDER_VAR;
1963 nodes[i].u.v = pv;
1964 }
1965
1966 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1967 {
1968 i = pa->order;
1969 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1970 nodes[i].kind = ORDER_ASM;
1971 nodes[i].u.a = pa;
1972 }
474eccc6 1973
7386e3ee
JH
1974 /* In toplevel reorder mode we output all statics; mark them as needed. */
1975 for (i = 0; i < max; ++i)
1976 {
1977 if (nodes[i].kind == ORDER_VAR)
1978 {
1979 varpool_mark_needed_node (nodes[i].u.v);
1980 }
1981 }
1982 varpool_empty_needed_queue ();
1983
7fece979
JJ
1984 for (i = 0; i < max; ++i)
1985 if (nodes[i].kind == ORDER_VAR)
1986 varpool_finalize_named_section_flags (nodes[i].u.v);
1987
474eccc6
ILT
1988 for (i = 0; i < max; ++i)
1989 {
1990 switch (nodes[i].kind)
1991 {
1992 case ORDER_FUNCTION:
257eb6e3 1993 nodes[i].u.f->process = 0;
474eccc6
ILT
1994 cgraph_expand_function (nodes[i].u.f);
1995 break;
1996
1997 case ORDER_VAR:
8a4a83ed 1998 varpool_assemble_decl (nodes[i].u.v);
474eccc6
ILT
1999 break;
2000
2001 case ORDER_ASM:
2002 assemble_asm (nodes[i].u.a->asm_str);
2003 break;
2004
2005 case ORDER_UNDEFINED:
2006 break;
2007
2008 default:
2009 gcc_unreachable ();
2010 }
2011 }
e7b9eb2c
ILT
2012
2013 cgraph_asm_nodes = NULL;
33283dad 2014 free (nodes);
474eccc6
ILT
2015}
2016
18c6ada9
JH
2017/* Return true when function body of DECL still needs to be kept around
2018 for later re-use. */
2019bool
85ad2ef5 2020cgraph_preserve_function_body_p (struct cgraph_node *node)
18c6ada9 2021{
c37f4ba4 2022 gcc_assert (cgraph_global_info_ready);
39e2db00 2023 gcc_assert (!node->alias && !node->thunk.thunk_p);
85ad2ef5 2024
18c6ada9 2025 /* Look if there is any clone around. */
9187e02d
JH
2026 if (node->clones)
2027 return true;
18c6ada9
JH
2028 return false;
2029}
2030
ef330312
PB
2031static void
2032ipa_passes (void)
2033{
db2960f4 2034 set_cfun (NULL);
04b201a2 2035 current_function_decl = NULL;
726a989a 2036 gimple_register_cfg_hooks ();
ef330312 2037 bitmap_obstack_initialize (NULL);
b20996ff 2038
090fa0ab
GF
2039 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2040
b20996ff 2041 if (!in_lto_p)
0430f80c
RG
2042 {
2043 execute_ipa_pass_list (all_small_ipa_passes);
2044 if (seen_error ())
2045 return;
2046 }
3baf459d 2047
467a8db0
JH
2048 /* We never run removal of unreachable nodes after early passes. This is
2049 because TODO is run before the subpasses. It is important to remove
2050 the unreachable functions to save works at IPA level and to get LTO
2051 symbol tables right. */
2052 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
2053
d7f09764
DN
2054 /* If pass_all_early_optimizations was not scheduled, the state of
2055 the cgraph will not be properly updated. Update it now. */
2056 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2057 cgraph_state = CGRAPH_STATE_IPA_SSA;
3baf459d 2058
d7f09764
DN
2059 if (!in_lto_p)
2060 {
2061 /* Generate coverage variables and constructors. */
2062 coverage_finish ();
2063
2064 /* Process new functions added. */
2065 set_cfun (NULL);
2066 current_function_decl = NULL;
2067 cgraph_process_new_functions ();
d7f09764 2068
090fa0ab
GF
2069 execute_ipa_summary_passes
2070 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
fb3f88cc 2071 }
c082f9f3
SB
2072
2073 /* Some targets need to handle LTO assembler output specially. */
2074 if (flag_generate_lto)
2075 targetm.asm_out.lto_start ();
2076
d7f09764
DN
2077 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
2078
2079 if (!in_lto_p)
2080 ipa_write_summaries ();
2081
c082f9f3
SB
2082 if (flag_generate_lto)
2083 targetm.asm_out.lto_end ();
2084
cc8547a7 2085 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
fb3f88cc 2086 execute_ipa_pass_list (all_regular_ipa_passes);
090fa0ab 2087 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
3baf459d 2088
ef330312
PB
2089 bitmap_obstack_release (NULL);
2090}
2091
25e2c40d
JH
2092
2093/* Return string alias is alias of. */
2094
2095static tree
2096get_alias_symbol (tree decl)
2097{
2098 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2099 return get_identifier (TREE_STRING_POINTER
2100 (TREE_VALUE (TREE_VALUE (alias))));
2101}
2102
2103
c9552bff
JH
2104/* Weakrefs may be associated to external decls and thus not output
2105 at expansion time. Emit all neccesary aliases. */
2106
a66f86bb 2107static void
c9552bff
JH
2108output_weakrefs (void)
2109{
2110 struct cgraph_node *node;
2111 struct varpool_node *vnode;
2112 for (node = cgraph_nodes; node; node = node->next)
25e2c40d 2113 if (node->alias && DECL_EXTERNAL (node->decl)
c481ae7f
JH
2114 && !TREE_ASM_WRITTEN (node->decl)
2115 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
c9552bff 2116 assemble_alias (node->decl,
25e2c40d
JH
2117 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
2118 : get_alias_symbol (node->decl));
c9552bff 2119 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
25e2c40d 2120 if (vnode->alias && DECL_EXTERNAL (vnode->decl)
c481ae7f
JH
2121 && !TREE_ASM_WRITTEN (vnode->decl)
2122 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->decl)))
c9552bff 2123 assemble_alias (vnode->decl,
25e2c40d
JH
2124 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
2125 : get_alias_symbol (vnode->decl));
c9552bff
JH
2126}
2127
4537ec0c 2128
1c4a429a
JH
2129/* Perform simple optimizations based on callgraph. */
2130
d7f09764 2131void
db0e878d 2132cgraph_optimize (void)
1c4a429a 2133{
1da2ed5f 2134 if (seen_error ())
413803d3
VR
2135 return;
2136
18c6ada9
JH
2137#ifdef ENABLE_CHECKING
2138 verify_cgraph ();
2139#endif
7be82279 2140
cd9c7bd2
JH
2141 /* Frontend may output common variables after the unit has been finalized.
2142 It is safe to deal with them here as they are always zero initialized. */
8a4a83ed 2143 varpool_analyze_pending_decls ();
857e7259 2144
a194aa56 2145 timevar_push (TV_CGRAPHOPT);
a5573239
JH
2146 if (pre_ipa_mem_report)
2147 {
2148 fprintf (stderr, "Memory consumption before IPA\n");
2149 dump_memory_report (false);
2150 }
b58b1157 2151 if (!quiet_flag)
a418679d 2152 fprintf (stderr, "Performing interprocedural optimizations\n");
f45e0ad1 2153 cgraph_state = CGRAPH_STATE_IPA;
f30cfcb1 2154
7e2fe9d8 2155 /* Don't run the IPA passes if there was any error or sorry messages. */
1da2ed5f 2156 if (!seen_error ())
7e2fe9d8
AP
2157 ipa_passes ();
2158
cc8547a7
AK
2159 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2160 if (seen_error ()
2161 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
9ba0399e
RH
2162 {
2163 timevar_pop (TV_CGRAPHOPT);
2164 return;
2165 }
4537ec0c 2166
6b02a499
JH
2167 /* This pass remove bodies of extern inline functions we never inlined.
2168 Do this later so other IPA passes see what is really going on. */
2169 cgraph_remove_unreachable_nodes (false, dump_file);
dafc5b82 2170 cgraph_global_info_ready = true;
a194aa56
JH
2171 if (cgraph_dump_file)
2172 {
7d82fe7c 2173 fprintf (cgraph_dump_file, "Optimized ");
a194aa56 2174 dump_cgraph (cgraph_dump_file);
cd9c7bd2 2175 dump_varpool (cgraph_dump_file);
a194aa56 2176 }
a5573239
JH
2177 if (post_ipa_mem_report)
2178 {
7fa982e5 2179 fprintf (stderr, "Memory consumption after IPA\n");
a5573239
JH
2180 dump_memory_report (false);
2181 }
a194aa56 2182 timevar_pop (TV_CGRAPHOPT);
1c4a429a 2183
b58b1157 2184 /* Output everything. */
3df9609a 2185 (*debug_hooks->assembly_start) ();
7d82fe7c
KC
2186 if (!quiet_flag)
2187 fprintf (stderr, "Assembling functions:\n");
18c6ada9
JH
2188#ifdef ENABLE_CHECKING
2189 verify_cgraph ();
2190#endif
474eccc6 2191
9187e02d 2192 cgraph_materialize_all_clones ();
febb1302
JH
2193 bitmap_obstack_initialize (NULL);
2194 execute_ipa_pass_list (all_late_ipa_passes);
2195 cgraph_remove_unreachable_nodes (true, dump_file);
2196#ifdef ENABLE_CHECKING
2197 verify_cgraph ();
2198#endif
2199 bitmap_obstack_release (NULL);
6674a6ce 2200 cgraph_mark_functions_to_output ();
0850f694 2201 output_weakrefs ();
cd9c7bd2 2202
f45e0ad1 2203 cgraph_state = CGRAPH_STATE_EXPANSION;
474eccc6
ILT
2204 if (!flag_toplevel_reorder)
2205 cgraph_output_in_order ();
2206 else
2207 {
2208 cgraph_output_pending_asms ();
2209
2210 cgraph_expand_all_functions ();
8a4a83ed 2211 varpool_remove_unreferenced_decls ();
474eccc6 2212
8a4a83ed 2213 varpool_assemble_pending_decls ();
474eccc6 2214 }
c9552bff 2215
f45e0ad1
JH
2216 cgraph_process_new_functions ();
2217 cgraph_state = CGRAPH_STATE_FINISHED;
cd9c7bd2 2218
a194aa56
JH
2219 if (cgraph_dump_file)
2220 {
7d82fe7c 2221 fprintf (cgraph_dump_file, "\nFinal ");
a194aa56 2222 dump_cgraph (cgraph_dump_file);
df7705b1 2223 dump_varpool (cgraph_dump_file);
a194aa56 2224 }
18c6ada9
JH
2225#ifdef ENABLE_CHECKING
2226 verify_cgraph ();
6de9cd9a
DN
2227 /* Double check that all inline clones are gone and that all
2228 function bodies have been released from memory. */
1da2ed5f 2229 if (!seen_error ())
6de9cd9a
DN
2230 {
2231 struct cgraph_node *node;
2232 bool error_found = false;
2233
2234 for (node = cgraph_nodes; node; node = node->next)
2235 if (node->analyzed
2236 && (node->global.inlined_to
39ecc018 2237 || gimple_has_body_p (node->decl)))
6de9cd9a
DN
2238 {
2239 error_found = true;
2240 dump_cgraph_node (stderr, node);
c22cacf3 2241 }
6de9cd9a 2242 if (error_found)
f30cfcb1 2243 internal_error ("nodes with unreleased memory found");
6de9cd9a 2244 }
18c6ada9 2245#endif
1c4a429a 2246}
4537ec0c 2247
9b3e897d
PB
2248void
2249init_cgraph (void)
2250{
a05541a9
JH
2251 if (!cgraph_dump_file)
2252 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
9b3e897d 2253}
57fb5341 2254
c22cacf3 2255/* The edges representing the callers of the NEW_VERSION node were
57fb5341
RL
2256 fixed by cgraph_function_versioning (), now the call_expr in their
2257 respective tree code should be updated to call the NEW_VERSION. */
2258
2259static void
2260update_call_expr (struct cgraph_node *new_version)
2261{
2262 struct cgraph_edge *e;
2263
2264 gcc_assert (new_version);
726a989a
RB
2265
2266 /* Update the call expr on the edges to call the new version. */
57fb5341 2267 for (e = new_version->callers; e; e = e->next_caller)
c0ab1df3
AP
2268 {
2269 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2270 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1d65f45c 2271 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
c0ab1df3 2272 }
57fb5341
RL
2273}
2274
2275
2276/* Create a new cgraph node which is the new version of
2277 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2278 edges which should be redirected to point to
2279 NEW_VERSION. ALL the callees edges of OLD_VERSION
2280 are cloned to the new version node. Return the new
91382288
JH
2281 version node.
2282
2283 If non-NULL BLOCK_TO_COPY determine what basic blocks
2284 was copied to prevent duplications of calls that are dead
2285 in the clone. */
57fb5341 2286
0a35513e 2287struct cgraph_node *
57fb5341 2288cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
b2c0ad40 2289 tree new_decl,
91382288
JH
2290 VEC(cgraph_edge_p,heap) *redirect_callers,
2291 bitmap bbs_to_copy)
2292 {
57fb5341 2293 struct cgraph_node *new_version;
ae2b0888 2294 struct cgraph_edge *e;
57fb5341
RL
2295 unsigned i;
2296
2297 gcc_assert (old_version);
c22cacf3 2298
a358e188 2299 new_version = cgraph_create_node (new_decl);
57fb5341 2300
0a35513e 2301 new_version->analyzed = old_version->analyzed;
57fb5341 2302 new_version->local = old_version->local;
036546e5
JH
2303 new_version->local.externally_visible = false;
2304 new_version->local.local = true;
57fb5341 2305 new_version->global = old_version->global;
8cf9feca 2306 new_version->rtl = old_version->rtl;
57fb5341
RL
2307 new_version->reachable = true;
2308 new_version->count = old_version->count;
2309
036546e5 2310 for (e = old_version->callees; e; e=e->next_callee)
91382288
JH
2311 if (!bbs_to_copy
2312 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2313 cgraph_clone_edge (e, new_version, e->call_stmt,
2314 e->lto_stmt_uid, REG_BR_PROB_BASE,
2315 CGRAPH_FREQ_BASE,
898b8927 2316 true);
036546e5 2317 for (e = old_version->indirect_calls; e; e=e->next_callee)
91382288
JH
2318 if (!bbs_to_copy
2319 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2320 cgraph_clone_edge (e, new_version, e->call_stmt,
2321 e->lto_stmt_uid, REG_BR_PROB_BASE,
2322 CGRAPH_FREQ_BASE,
898b8927 2323 true);
ac47786e 2324 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
b2c0ad40
KH
2325 {
2326 /* Redirect calls to the old version node to point to its new
2327 version. */
2328 cgraph_redirect_edge_callee (e, new_version);
2329 }
57fb5341 2330
b0d0a291
PM
2331 cgraph_call_node_duplication_hooks (old_version, new_version);
2332
57fb5341
RL
2333 return new_version;
2334 }
2335
2336 /* Perform function versioning.
c22cacf3 2337 Function versioning includes copying of the tree and
57fb5341
RL
2338 a callgraph update (creating a new cgraph node and updating
2339 its callees and callers).
2340
2341 REDIRECT_CALLERS varray includes the edges to be redirected
2342 to the new version.
2343
2344 TREE_MAP is a mapping of tree nodes we want to replace with
2345 new ones (according to results of prior analysis).
2346 OLD_VERSION_NODE is the node that is versioned.
1a2c27e9 2347
91382288
JH
2348 If non-NULL ARGS_TO_SKIP determine function parameters to remove
2349 from new version.
1a2c27e9 2350 If SKIP_RETURN is true, the new version will return void.
91382288 2351 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
1a2c27e9
EB
2352 If non_NULL NEW_ENTRY determine new entry BB of the clone.
2353
2354 Return the new version's cgraph node. */
57fb5341
RL
2355
2356struct cgraph_node *
2357cgraph_function_versioning (struct cgraph_node *old_version_node,
b2c0ad40 2358 VEC(cgraph_edge_p,heap) *redirect_callers,
9187e02d 2359 VEC (ipa_replace_map_p,gc)* tree_map,
036546e5 2360 bitmap args_to_skip,
1a2c27e9 2361 bool skip_return,
91382288
JH
2362 bitmap bbs_to_copy,
2363 basic_block new_entry_block,
036546e5 2364 const char *clone_name)
57fb5341
RL
2365{
2366 tree old_decl = old_version_node->decl;
2367 struct cgraph_node *new_version_node = NULL;
2368 tree new_decl;
2369
2370 if (!tree_versionable_function_p (old_decl))
2371 return NULL;
2372
61e03ffc
JH
2373 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2374
1a2c27e9
EB
2375 /* Make a new FUNCTION_DECL tree node for the new version. */
2376 if (!args_to_skip && !skip_return)
c6f7cfc1
JH
2377 new_decl = copy_node (old_decl);
2378 else
1a2c27e9
EB
2379 new_decl
2380 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
57fb5341 2381
9990e02a
JH
2382 /* Generate a new name for the new version. */
2383 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2384 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2385 SET_DECL_RTL (new_decl, NULL);
2386
2062f77b
PB
2387 /* When the old decl was a con-/destructor make sure the clone isn't. */
2388 DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
2389 DECL_STATIC_DESTRUCTOR(new_decl) = 0;
2390
57fb5341
RL
2391 /* Create the new version's call-graph node.
2392 and update the edges of the new node. */
2393 new_version_node =
2394 cgraph_copy_node_for_versioning (old_version_node, new_decl,
91382288 2395 redirect_callers, bbs_to_copy);
57fb5341
RL
2396
2397 /* Copy the OLD_VERSION_NODE function tree to the new version. */
91382288 2398 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
1a2c27e9 2399 skip_return, bbs_to_copy, new_entry_block);
57fb5341 2400
c22cacf3 2401 /* Update the new version's properties.
c0ab1df3
AP
2402 Make The new version visible only within this translation unit. Make sure
2403 that is not weak also.
c22cacf3 2404 ??? We cannot use COMDAT linkage because there is no
57fb5341 2405 ABI support for this. */
715a4e08 2406 cgraph_make_decl_local (new_version_node->decl);
e6e1c050 2407 DECL_VIRTUAL_P (new_version_node->decl) = 0;
57fb5341
RL
2408 new_version_node->local.externally_visible = 0;
2409 new_version_node->local.local = 1;
2410 new_version_node->lowered = true;
e6e1c050 2411
c0ab1df3
AP
2412 /* Update the call_expr on the edges to call the new version node. */
2413 update_call_expr (new_version_node);
b8698a0f 2414
129a37fc 2415 cgraph_call_function_insertion_hooks (new_version_node);
57fb5341
RL
2416 return new_version_node;
2417}
ea99e0be 2418
9187e02d
JH
2419/* Given virtual clone, turn it into actual clone. */
2420static void
2421cgraph_materialize_clone (struct cgraph_node *node)
2422{
2423 bitmap_obstack_initialize (NULL);
e466e2ce
JH
2424 node->former_clone_of = node->clone_of->decl;
2425 if (node->clone_of->former_clone_of)
2426 node->former_clone_of = node->clone_of->former_clone_of;
9187e02d
JH
2427 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2428 tree_function_versioning (node->clone_of->decl, node->decl,
2429 node->clone.tree_map, true,
1a2c27e9
EB
2430 node->clone.args_to_skip, false,
2431 NULL, NULL);
08ad1d6d
JH
2432 if (cgraph_dump_file)
2433 {
2434 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2435 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2436 }
9187e02d
JH
2437
2438 /* Function is no longer clone. */
2439 if (node->next_sibling_clone)
2440 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2441 if (node->prev_sibling_clone)
2442 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2443 else
2444 node->clone_of->clones = node->next_sibling_clone;
2445 node->next_sibling_clone = NULL;
2446 node->prev_sibling_clone = NULL;
0e3776db 2447 if (!node->clone_of->analyzed && !node->clone_of->clones)
f0c418dc
JH
2448 {
2449 cgraph_release_function_body (node->clone_of);
2450 cgraph_node_remove_callees (node->clone_of);
2451 ipa_remove_all_references (&node->clone_of->ref_list);
2452 }
9187e02d
JH
2453 node->clone_of = NULL;
2454 bitmap_obstack_release (NULL);
2455}
2456
8132a837
MJ
2457/* If necessary, change the function declaration in the call statement
2458 associated with E so that it corresponds to the edge callee. */
2459
2460gimple
2461cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2462{
2463 tree decl = gimple_call_fndecl (e->call_stmt);
2464 gimple new_stmt;
ceeffab0 2465 gimple_stmt_iterator gsi;
437ffe7b
JH
2466#ifdef ENABLE_CHECKING
2467 struct cgraph_node *node;
2468#endif
8132a837 2469
3949c4a7 2470 if (e->indirect_unknown_callee
51093fca 2471 || decl == e->callee->decl)
8132a837
MJ
2472 return e->call_stmt;
2473
437ffe7b 2474#ifdef ENABLE_CHECKING
3949c4a7
MJ
2475 if (decl)
2476 {
2477 node = cgraph_get_node (decl);
2478 gcc_assert (!node || !node->clone.combined_args_to_skip);
2479 }
437ffe7b 2480#endif
e466e2ce 2481
8132a837
MJ
2482 if (cgraph_dump_file)
2483 {
2484 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2485 cgraph_node_name (e->caller), e->caller->uid,
2486 cgraph_node_name (e->callee), e->callee->uid);
2487 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e466e2ce 2488 if (e->callee->clone.combined_args_to_skip)
8d2adc24
EB
2489 {
2490 fprintf (cgraph_dump_file, " combined args to skip: ");
2491 dump_bitmap (cgraph_dump_file,
2492 e->callee->clone.combined_args_to_skip);
e466e2ce 2493 }
8132a837
MJ
2494 }
2495
2496 if (e->callee->clone.combined_args_to_skip)
8d2adc24 2497 {
1b7d2dd1 2498 int lp_nr;
8d2adc24
EB
2499
2500 new_stmt
2501 = gimple_call_copy_skip_args (e->call_stmt,
2502 e->callee->clone.combined_args_to_skip);
3d113394 2503 gimple_call_set_fndecl (new_stmt, e->callee->decl);
8d2adc24
EB
2504
2505 if (gimple_vdef (new_stmt)
2506 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2507 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2508
81fa35bd 2509 gsi = gsi_for_stmt (e->call_stmt);
72351fa3 2510 gsi_replace (&gsi, new_stmt, false);
1b7d2dd1
RG
2511 /* We need to defer cleaning EH info on the new statement to
2512 fixup-cfg. We may not have dominator information at this point
2513 and thus would end up with unreachable blocks and have no way
2514 to communicate that we need to run CFG cleanup then. */
2515 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2516 if (lp_nr != 0)
2517 {
2518 remove_stmt_from_eh_lp (e->call_stmt);
2519 add_stmt_to_eh_lp (new_stmt, lp_nr);
2520 }
8d2adc24 2521 }
8132a837 2522 else
3d113394
RG
2523 {
2524 new_stmt = e->call_stmt;
2525 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2526 update_stmt (new_stmt);
2527 }
8132a837 2528
8132a837
MJ
2529 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2530
2531 if (cgraph_dump_file)
2532 {
2533 fprintf (cgraph_dump_file, " updated to:");
2534 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2535 }
2536 return new_stmt;
2537}
2538
9187e02d 2539/* Once all functions from compilation unit are in memory, produce all clones
8132a837
MJ
2540 and update all calls. We might also do this on demand if we don't want to
2541 bring all functions to memory prior compilation, but current WHOPR
2542 implementation does that and it is is bit easier to keep everything right in
2543 this order. */
9187e02d
JH
2544void
2545cgraph_materialize_all_clones (void)
2546{
2547 struct cgraph_node *node;
2548 bool stabilized = false;
2549
2550 if (cgraph_dump_file)
2551 fprintf (cgraph_dump_file, "Materializing clones\n");
2552#ifdef ENABLE_CHECKING
2553 verify_cgraph ();
2554#endif
2555
2556 /* We can also do topological order, but number of iterations should be
2557 bounded by number of IPA passes since single IPA pass is probably not
2558 going to create clones of clones it created itself. */
2559 while (!stabilized)
2560 {
2561 stabilized = true;
2562 for (node = cgraph_nodes; node; node = node->next)
2563 {
2564 if (node->clone_of && node->decl != node->clone_of->decl
2565 && !gimple_has_body_p (node->decl))
2566 {
2567 if (gimple_has_body_p (node->clone_of->decl))
2568 {
2569 if (cgraph_dump_file)
08ad1d6d 2570 {
61502ca8 2571 fprintf (cgraph_dump_file, "cloning %s to %s\n",
08ad1d6d
JH
2572 cgraph_node_name (node->clone_of),
2573 cgraph_node_name (node));
2574 if (node->clone.tree_map)
2575 {
2576 unsigned int i;
2577 fprintf (cgraph_dump_file, " replace map: ");
2578 for (i = 0; i < VEC_length (ipa_replace_map_p,
2579 node->clone.tree_map);
2580 i++)
2581 {
2582 struct ipa_replace_map *replace_info;
2583 replace_info = VEC_index (ipa_replace_map_p,
2584 node->clone.tree_map,
2585 i);
2586 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2587 fprintf (cgraph_dump_file, " -> ");
2588 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2589 fprintf (cgraph_dump_file, "%s%s;",
2590 replace_info->replace_p ? "(replace)":"",
2591 replace_info->ref_p ? "(ref)":"");
2592 }
2593 fprintf (cgraph_dump_file, "\n");
2594 }
2595 if (node->clone.args_to_skip)
2596 {
2597 fprintf (cgraph_dump_file, " args_to_skip: ");
2598 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2599 }
2600 if (node->clone.args_to_skip)
2601 {
2602 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2603 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2604 }
2605 }
9187e02d 2606 cgraph_materialize_clone (node);
36576655 2607 stabilized = false;
9187e02d 2608 }
9187e02d
JH
2609 }
2610 }
2611 }
47cb0d7d
JH
2612 for (node = cgraph_nodes; node; node = node->next)
2613 if (!node->analyzed && node->callees)
2614 cgraph_node_remove_callees (node);
8132a837
MJ
2615 if (cgraph_dump_file)
2616 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
9a23acef
JH
2617#ifdef ENABLE_CHECKING
2618 verify_cgraph ();
2619#endif
9187e02d
JH
2620 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2621}
2622
7be82279 2623#include "gt-cgraphunit.h"