/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface in between the
   tree-based front ends and the back end (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for static
      variables.

    - cgraph_finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve memory
      usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C frontend).

    - cgraph_optimize

      In this unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The back end can then use this information
      to modify calling conventions, do better inlining or similar
      optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by this function.
      There should be little need to call this function and all the
      references should be made explicit to cgraph code.  At present these
      functions are used by the C++ frontend to explicitly mark the keyed
      methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood by
      generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by the expression as needed.

      ??? On tree-ssa the genericizing should take place here and we would
      avoid the need for these hooks (replacing them by a genericizing hook).

   Analysis of all functions is deferred to cgraph_finalize_compilation_unit
   and expansion into cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are analyzed.
   During analysis the call-graph edges from reachable functions are
   constructed and their destinations are marked as reachable.  References to
   functions and variables are discovered too, and variables found to be
   needed are output to the assembly file.  Via the mark_referenced call in
   assemble_variable, functions referenced by static variables are noticed
   too.

   The intra-procedural information is produced and its existence indicated
   by global_info_ready.  Once this flag is set it is impossible to change a
   function from !reachable to reachable, and thus assemble_variable no
   longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable functions
   that have not been completely inlined and are not external are output.

   ??? It is possible that a reference to a function or variable is optimized
   out.  We cannot deal with this nicely because the topological order is not
   suitable for it.  For tree-ssa we may consider another pass doing the
   optimization and re-discovering reachable functions.

   ??? Reorganize the code so variables are output very last and only if they
   really have been referenced by the produced code, so we catch more cases
   where the reference has been optimized out.  */
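
/* An illustrative sketch (not part of the original file) of the calling
   sequence described above; PARSE_NEXT_FUNCTION is a hypothetical
   placeholder for the front end's own parsing machinery:

     tree fndecl;
     while ((fndecl = parse_next_function ()) != NULL_TREE)
       cgraph_finalize_function (fndecl, false);
     cgraph_finalize_compilation_unit ();

   Static variables are announced via varpool_finalize_variable in the same
   fashion, and cgraph_optimize is invoked from
   cgraph_finalize_compilation_unit once analysis has finished.  */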

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"
#include "plugin.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);

FILE *cgraph_dump_file;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or something magic in the system
   configury.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && (!node->thunk.thunk_p && !node->same_body_alias)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see PR24561),
     but don't do so for always_inline functions, functions declared inline
     and nested functions.  These were optimized out in the original
     implementation and it is unclear whether we want to change the
     behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize
            && !DECL_DISREGARD_INLINE_LIMITS (decl)
            && !DECL_DECLARED_INLINE_P (decl)
            && !(DECL_CONTEXT (decl)
                 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}

/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into the callgraph in a way so they look like ordinary reachable
   functions inserted into the callgraph already at construction time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize the function and
             move it into the reachable functions list.  */

          node->next_needed = NULL;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          cgraph_call_function_insertion_hooks (node);
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have already been performed on the whole
             cgraph but not on this function.  */

          gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
              && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.pass.sub);
          else
            compute_inline_parameters (node, true);
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          cgraph_call_function_insertion_hooks (node);
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->process = 0;
          cgraph_call_function_insertion_hooks (node);
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
      varpool_analyze_pending_decls ();
    }
  return output;
}

/* As a GCC extension we allow redefinition of a function.  The semantics
   when the two bodies differ are not well defined.  We replace the old body
   with the new body, so in unit-at-a-time mode we always use the new body,
   while in normal mode we may end up with the old body inlined into some
   functions and the new body expanded and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */
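
/* For instance (an illustrative example, not from the original sources):

     extern inline int inc (int x) { return x + 1; }
     int inc (int x) { return x + 2; }

   The second definition replaces the first; cgraph_reset_node below discards
   everything that was recorded about the earlier body.  */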

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should never
     present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);
}

static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;

  if (node->nested)
    lower_nested_functions (node->decl);
  gcc_assert (!node->nested);

  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  if (node->local.finalized)
    {
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by a vtable from another
         compilation unit.  Still we want to devirtualize calls to those, so
         we need to analyze them.
         FIXME: We should introduce may edges for this purpose and update
         their handling in unreachable function removal and the inliner
         too.  */
      || (DECL_VIRTUAL_P (decl)
          && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}

/* C99 extern inline keywords allow changing of the declaration after the
   function has been finalized.  We need to re-decide if we want to mark the
   function as needed then.  */
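
/* An illustrative example (not from the original sources) of such a
   declaration change under C99 inline semantics:

     inline int sq (int x) { return x * x; }
     extern int sq (int x);

   The second declaration requests an external definition, so SQ may become
   needed even though it was not when it was first finalized.  */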

void
cgraph_mark_if_needed (tree decl)
{
  struct cgraph_node *node = cgraph_get_node (decl);
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
}

/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  node = cgraph_function_or_thunk_node (node, NULL);
  node2 = cgraph_function_or_thunk_node (node2, NULL);
  while (node != node2 && node2)
    node2 = node2->clone_of;
  return node2 != NULL;
}

/* Verify edge E count and frequency.  */

static bool
verify_edge_count_and_frequency (struct cgraph_edge *e)
{
  bool error_found = false;
  if (e->count < 0)
    {
      error ("caller edge count is negative");
      error_found = true;
    }
  if (e->frequency < 0)
    {
      error ("caller edge frequency is negative");
      error_found = true;
    }
  if (e->frequency > CGRAPH_FREQ_MAX)
    {
      error ("caller edge frequency is too large");
      error_found = true;
    }
  if (gimple_has_body_p (e->caller->decl)
      && !e->caller->global.inlined_to
      /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized
         out.  Remove this once edges are actually removed from the function
         at that time.  */
      && (e->frequency
          || (inline_edge_summary_vec
              && !inline_edge_summary (e)->predicate))
      && (e->frequency
          != compute_call_stmt_bb_frequency (e->caller->decl,
                                             gimple_bb (e->call_stmt))))
    {
      error ("caller edge frequency %i does not match BB frequency %i",
             e->frequency,
             compute_call_stmt_bb_frequency (e->caller->decl,
                                             gimple_bb (e->call_stmt)));
      error_found = true;
    }
  return error_found;
}

/* Switch to THIS_CFUN if needed and print STMT to stderr.  */
static void
cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
{
  /* debug_gimple_stmt needs the correct cfun.  */
  if (cfun != this_cfun)
    set_cfun (this_cfun);
  debug_gimple_stmt (stmt);
}
/* Verify the consistency of cgraph node NODE.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               identifier_to_locale (cgraph_node_name (e->caller)),
               identifier_to_locale (cgraph_node_name (e->callee)));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("execution count is negative");
      error_found = true;
    }
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
        {
          error ("aux field set for indirect edge from %s",
                 identifier_to_locale (cgraph_node_name (e->caller)));
          error_found = true;
        }
      if (!e->indirect_unknown_callee
          || !e->indirect_info)
        {
          error ("An indirect edge from %s is not marked as indirect or has "
                 "associated indirect_info, the corresponding statement is: ",
                 identifier_to_locale (cgraph_node_name (e->caller)));
          cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
          error_found = true;
        }
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
        error_found = true;
      if (!e->inline_failed)
        {
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_get_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
        if (n == node)
          break;
      if (!n)
        {
          error ("node has wrong clone_of");
          error_found = true;
        }
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
        if (n->clone_of != node)
          break;
      if (n)
        {
          error ("node has wrong clone list");
          error_found = true;
        }
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
        {
          error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
          error_found = true;
        }
      if (n == node)
        {
          error ("node is alone in a comdat group");
          error_found = true;
        }
      do
        {
          if (!n->same_comdat_group)
            {
              error ("same_comdat_group is not a circular list");
              error_found = true;
              break;
            }
          n = n->same_comdat_group;
        }
      while (n != node);
    }

  if (node->analyzed && node->alias)
    {
      bool ref_found = false;
      int i;
      struct ipa_ref *ref;

      if (node->callees)
        {
          error ("Alias has call edges");
          error_found = true;
        }
      for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
        if (ref->use != IPA_REF_ALIAS)
          {
            error ("Alias has non-alias reference");
            error_found = true;
          }
        else if (ref_found)
          {
            error ("Alias has more than one alias reference");
            error_found = true;
          }
        else
          ref_found = true;
      if (!ref_found)
        {
          error ("Analyzed alias has no reference");
          error_found = true;
        }
    }
  if (node->analyzed && node->thunk.thunk_p)
    {
      if (!node->callees)
        {
          error ("No edge out of thunk node");
          error_found = true;
        }
      else if (node->callees->next_callee)
        {
          error ("More than one edge out of thunk node");
          error_found = true;
        }
      if (gimple_has_body_p (node->decl))
        {
          error ("Thunk is not supposed to have body");
          error_found = true;
        }
    }
  else if (node->analyzed && gimple_has_body_p (node->decl)
           && !TREE_ASM_WRITTEN (node->decl)
           && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
           && !flag_wpa)
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
              {
                gimple stmt = gsi_stmt (gsi);
                if (is_gimple_call (stmt))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    tree decl = gimple_call_fndecl (stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            cgraph_debug_gimple_stmt (this_cfun, stmt);
                            error_found = true;
                          }
                        if (!e->indirect_unknown_callee)
                          {
                            if (!e->callee->global.inlined_to
                                && decl
                                && cgraph_get_node (decl)
                                && (e->callee->former_clone_of
                                    != cgraph_get_node (decl)->decl)
                                /* IPA-CP sometimes redirects an edge to a
                                   clone and then back to the former function.
                                   This ping-pong has to go, eventually.  */
                                && (cgraph_function_or_thunk_node (cgraph_get_node (decl), NULL)
                                    != cgraph_function_or_thunk_node (e->callee, NULL))
                                && !clone_of_p (cgraph_get_node (decl),
                                                e->callee))
                              {
                                error ("edge points to wrong declaration:");
                                debug_tree (e->callee->decl);
                                fprintf (stderr," Instead of:");
                                debug_tree (decl);
                                error_found = true;
                              }
                          }
                        else if (decl)
                          {
                            error ("an indirect edge with unknown callee "
                                   "corresponding to a call_stmt with "
                                   "a known declaration:");
                            error_found = true;
                            cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
                          }
                        e->aux = (void *)1;
                      }
                    else if (decl)
                      {
                        error ("missing callgraph edge for call stmt:");
                        cgraph_debug_gimple_stmt (this_cfun, stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)),
                     identifier_to_locale (cgraph_node_name (e->callee)));
              cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
      for (e = node->indirect_calls; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("an indirect edge from %s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)));
              cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify whole cgraph structure.  */
DEBUG_FUNCTION void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (seen_error ())
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (seen_error ())
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  if (node->alias && node->thunk.alias)
    {
      struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
      if (!VEC_length (ipa_ref_t, node->ref_list.references))
        ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      if (node->same_body_alias)
        {
          DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
          DECL_DECLARED_INLINE_P (node->decl)
            = DECL_DECLARED_INLINE_P (node->thunk.alias);
          DECL_DISREGARD_INLINE_LIMITS (node->decl)
            = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
        }

      /* Fixup the visibility nonsense the C++ frontend produces on same body
         aliases.  */
      if (TREE_PUBLIC (node->decl) && node->same_body_alias)
        {
          DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
          if (DECL_ONE_ONLY (node->thunk.alias))
            {
              DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
              DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
              if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
                {
                  struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
                  node->same_comdat_group = tgt;
                  if (!tgt->same_comdat_group)
                    tgt->same_comdat_group = node;
                  else
                    {
                      struct cgraph_node *n;
                      for (n = tgt->same_comdat_group;
                           n->same_comdat_group != tgt;
                           n = n->same_comdat_group)
                        ;
                      n->same_comdat_group = node;
                    }
                }
            }
        }
      cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
      if (node->address_taken)
        cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
      if (cgraph_decide_is_function_needed (node, node->decl))
        cgraph_mark_needed_node (node);
    }
  else if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
                          NULL, 0, CGRAPH_FREQ_BASE);
    }
  else
    {
      current_function_decl = decl;
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
         function we lower it, which will require gimplified nested
         functions, so we can end up here with an already gimplified
         body.  */
      if (!gimple_body (decl))
        gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      cgraph_lower_function (node);
      pop_cfun ();
    }
  node->analyzed = true;

  current_function_decl = save;
}

/* The C++ frontend produces same body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, but once the C++ FE
   is sure it won't stream PCH we build the links via this function.  */

void
cgraph_process_same_body_aliases (void)
{
  struct cgraph_node *node;
  for (node = cgraph_nodes; node; node = node->next)
    if (node->same_body_alias
        && !VEC_length (ipa_ref_t, node->ref_list.references))
      {
        struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
        ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      }
  same_body_aliases_done = true;
}

/* Process attributes common for vars and functions.  */

static void
process_common_attributes (tree decl)
{
  tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));

  if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
    {
      warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
                  "%<weakref%> attribute should be accompanied with"
                  " an %<alias%> attribute");
      DECL_WEAK (decl) = 0;
      DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
                                                 DECL_ATTRIBUTES (decl));
    }
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

     void f();
     void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

     void f() {}
     void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
        cgraph_mark_needed_node (node);
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
          && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
          && TREE_PUBLIC (node->decl))
        {
          if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (node->decl))
            warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute has effect only on public objects");
          else if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
          && (node->local.finalized && !node->alias))
        {
          warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                      "%<weakref%> attribute ignored"
                      " because function is defined");
          DECL_WEAK (decl) = 0;
          DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
                                                     DECL_ATTRIBUTES (decl));
        }
      process_common_attributes (decl);
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (DECL_PRESERVE_P (decl))
        {
          vnode->force_output = true;
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
          && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
          && TREE_PUBLIC (vnode->decl))
        {
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute has effect only on public objects");
          else if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
          && vnode->finalized
          && DECL_INITIAL (decl))
        {
          warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                      "%<weakref%> attribute ignored"
                      " because variable is initialized");
          DECL_WEAK (decl) = 0;
          DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
                                                     DECL_ATTRIBUTES (decl));
        }
      process_common_attributes (decl);
    }
}

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Just the new part of the queue is
   re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points on the way (by template instantiation and
     virtual method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later use
         the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl)
          && (!node->alias || !node->thunk.alias)
          && !node->thunk.thunk_p)
        {
          cgraph_reset_node (node);
          node->local.redefined_extern_inline = true;
          continue;
        }

      if (!node->analyzed)
        cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);
      for (edge = node->callers; edge; edge = edge->next_caller)
        if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
          cgraph_mark_reachable_node (edge->caller);

      if (node->same_comdat_group)
        {
          for (next = node->same_comdat_group;
               next != node;
               next = next->same_comdat_group)
            cgraph_mark_reachable_node (next);
        }

      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body.  The DECL_INITIAL() of
         that abstract function declaration will be later needed to output
         debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
        {
          struct cgraph_node *origin_node;
          origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
          origin_node->abstract_and_needed = true;
        }

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl)
          && (!node->alias || !node->thunk.alias)
          && !node->thunk.thunk_p)
        cgraph_reset_node (node);

      if (!node->reachable
          && (gimple_has_body_p (decl) || node->thunk.thunk_p
              || (node->alias && node->thunk.alias)))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || node->thunk.thunk_p
                  || node->alias
                  || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}

/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors; we would
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}

/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
        continue;

      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (node->analyzed
          && !node->thunk.thunk_p
          && !node->alias
          && !node->global.inlined_to
          && (!cgraph_only_called_directly_p (node)
              || ((e || ipa_ref_has_aliases_p (&node->ref_list))
                  && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        {
          node->process = 1;
          if (node->same_comdat_group)
            {
              struct cgraph_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->thunk.thunk_p && !next->alias)
                  next->process = 1;
            }
        }
      else if (node->same_comdat_group)
        {
#ifdef ENABLE_CHECKING
          check_same_comdat_groups = true;
#endif
        }
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit, when the offline copy is outside a
                 partition but inline copies are inside a partition, we can
                 end up not removing the body since we no longer have an
                 analyzed node pointing to it.  */
              && !node->in_other_partition
              && !node->alias
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to
                      || !gimple_has_body_p (decl)
                      || node->in_other_partition
                      || DECL_EXTERNAL (decl));

        }

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
        {
          tree decl = node->decl;
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit, when the offline copy is outside a
                 partition but inline copies are inside a partition, we can
                 end up not removing the body since we no longer have an
                 analyzed node pointing to it.  */
              && !node->in_other_partition
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function in same "
                              "comdat group");
            }
        }
#endif
}

/* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to the newly constructed empty function
   body.  Return the basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create a BB for the body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}

/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable offset
   indicated by VIRTUAL_OFFSET, if that is non-null.  THIS_ADJUSTING is
   nonzero for a this adjusting thunk and zero for a result adjusting
   thunk.  */

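/* Roughly, the GIMPLE built by thunk_adjust below corresponds to the
   following C (an illustrative sketch, not part of the original file),
   assuming the vptr sits at offset zero of the object:

     if (this_adjusting)
       ptr = (char *) ptr + fixed_offset;
     if (virtual_offset)
       {
         char *vtable = *(char **) ptr;
         ptr = (char *) ptr + *(size_t *) (vtable + virtual_offset);
       }
     if (!this_adjusting)
       ptr = (char *) ptr + fixed_offset;

   i.e. the fixed offset is applied before the vcall offset for a this
   adjusting thunk and after it for a result adjusting thunk.  */
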
1370static tree
1371thunk_adjust (gimple_stmt_iterator * bsi,
1372 tree ptr, bool this_adjusting,
1373 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1374{
1375 gimple stmt;
1376 tree ret;
1377
313333a6
RG
1378 if (this_adjusting
1379 && fixed_offset != 0)
6744a6ab
JH
1380 {
1381 stmt = gimple_build_assign (ptr,
1382 fold_build2_loc (input_location,
1383 POINTER_PLUS_EXPR,
1384 TREE_TYPE (ptr), ptr,
1385 size_int (fixed_offset)));
1386 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1387 }
1388
1389 /* If there's a virtual offset, look up that value in the vtable and
1390 adjust the pointer again. */
1391 if (virtual_offset)
1392 {
1393 tree vtabletmp;
1394 tree vtabletmp2;
1395 tree vtabletmp3;
1396 tree offsettmp;
1397
1398 if (!vtable_entry_type)
1399 {
1400 tree vfunc_type = make_node (FUNCTION_TYPE);
1401 TREE_TYPE (vfunc_type) = integer_type_node;
1402 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1403 layout_type (vfunc_type);
1404
1405 vtable_entry_type = build_pointer_type (vfunc_type);
1406 }
1407
1408 vtabletmp =
1409 create_tmp_var (build_pointer_type
1410 (build_pointer_type (vtable_entry_type)), "vptr");
1411
1412 /* The vptr is always at offset zero in the object. */
1413 stmt = gimple_build_assign (vtabletmp,
1414 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1415 ptr));
1416 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1417 mark_symbols_for_renaming (stmt);
1418 find_referenced_vars_in (stmt);
1419
1420 /* Form the vtable address. */
1421 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1422 "vtableaddr");
1423 stmt = gimple_build_assign (vtabletmp2,
70f34814 1424 build_simple_mem_ref (vtabletmp));
6744a6ab
JH
1425 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1426 mark_symbols_for_renaming (stmt);
1427 find_referenced_vars_in (stmt);
1428
1429 /* Find the entry with the vcall offset. */
1430 stmt = gimple_build_assign (vtabletmp2,
1431 fold_build2_loc (input_location,
1432 POINTER_PLUS_EXPR,
1433 TREE_TYPE (vtabletmp2),
1434 vtabletmp2,
1435 fold_convert (sizetype,
1436 virtual_offset)));
1437 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1438
1439 /* Get the offset itself. */
1440 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1441 "vcalloffset");
1442 stmt = gimple_build_assign (vtabletmp3,
70f34814 1443 build_simple_mem_ref (vtabletmp2));
6744a6ab
JH
1444 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1445 mark_symbols_for_renaming (stmt);
1446 find_referenced_vars_in (stmt);
1447
1448 /* Cast to sizetype. */
1449 offsettmp = create_tmp_var (sizetype, "offset");
1450 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1451 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1452 mark_symbols_for_renaming (stmt);
1453 find_referenced_vars_in (stmt);
1454
1455 /* Adjust the `this' pointer. */
1456 ptr = fold_build2_loc (input_location,
1457 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1458 offsettmp);
1459 }
1460
313333a6
RG
1461 if (!this_adjusting
1462 && fixed_offset != 0)
6744a6ab
JH
1463 /* Adjust the pointer by the constant. */
1464 {
1465 tree ptrtmp;
1466
1467 if (TREE_CODE (ptr) == VAR_DECL)
1468 ptrtmp = ptr;
1469 else
1470 {
1471 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1472 stmt = gimple_build_assign (ptrtmp, ptr);
1473 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1474 mark_symbols_for_renaming (stmt);
1475 find_referenced_vars_in (stmt);
1476 }
1477 ptr = fold_build2_loc (input_location,
1478 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1479 size_int (fixed_offset));
1480 }
1481
1482 /* Emit the statement and gimplify the adjustment expression. */
1483 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1484 stmt = gimple_build_assign (ret, ptr);
1485 mark_symbols_for_renaming (stmt);
1486 find_referenced_vars_in (stmt);
1487 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1488
1489 return ret;
1490}
1491
1492/* Produce assembler for thunk NODE. */
1493
1494static void
1495assemble_thunk (struct cgraph_node *node)
1496{
1497 bool this_adjusting = node->thunk.this_adjusting;
1498 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1499 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1500 tree virtual_offset = NULL;
1501 tree alias = node->thunk.alias;
1502 tree thunk_fndecl = node->decl;
1503 tree a = DECL_ARGUMENTS (thunk_fndecl);
1504
1505 current_function_decl = thunk_fndecl;
1506
d06865bf
DK
1507 /* Ensure thunks are emitted in their correct sections. */
1508 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1509
6744a6ab
JH
1510 if (this_adjusting
1511 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1512 virtual_value, alias))
1513 {
1514 const char *fnname;
1515 tree fn_block;
1516
1517 DECL_RESULT (thunk_fndecl)
1518 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1519 RESULT_DECL, 0, integer_type_node);
15488554 1520 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
6744a6ab
JH
1521
1522 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1523 create one. */
1524 fn_block = make_node (BLOCK);
1525 BLOCK_VARS (fn_block) = a;
1526 DECL_INITIAL (thunk_fndecl) = fn_block;
1527 init_function_start (thunk_fndecl);
1528 cfun->is_thunk = 1;
1529 assemble_start_function (thunk_fndecl, fnname);
1530
1531 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1532 fixed_offset, virtual_value, alias);
1533
1534 assemble_end_function (thunk_fndecl, fnname);
1535 init_insn_lengths ();
1536 free_after_compilation (cfun);
1537 set_cfun (NULL);
1538 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
c47d0034
JH
1539 node->thunk.thunk_p = false;
1540 node->analyzed = false;
6744a6ab
JH
1541 }
1542 else
1543 {
1544 tree restype;
1545 basic_block bb, then_bb, else_bb, return_bb;
1546 gimple_stmt_iterator bsi;
1547 int nargs = 0;
1548 tree arg;
1549 int i;
1550 tree resdecl;
1551 tree restmp = NULL;
1552 VEC(tree, heap) *vargs;
1553
1554 gimple call;
1555 gimple ret;
1556
1557 DECL_IGNORED_P (thunk_fndecl) = 1;
1558 bitmap_obstack_initialize (NULL);
1559
1560 if (node->thunk.virtual_offset_p)
1561 virtual_offset = size_int (virtual_value);
1562
1563 /* Build the return declaration for the function. */
1564 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1565 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1566 {
1567 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1568 DECL_ARTIFICIAL (resdecl) = 1;
1569 DECL_IGNORED_P (resdecl) = 1;
1570 DECL_RESULT (thunk_fndecl) = resdecl;
1571 }
1572 else
1573 resdecl = DECL_RESULT (thunk_fndecl);
1574
1575 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1576
1577 bsi = gsi_start_bb (bb);
1578
1579 /* Build call to the function being thunked. */
1580 if (!VOID_TYPE_P (restype))
1581 {
1582 if (!is_gimple_reg_type (restype))
1583 {
1584 restmp = resdecl;
c021f10b 1585 add_local_decl (cfun, restmp);
6744a6ab
JH
1586 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1587 }
1588 else
1589 restmp = create_tmp_var_raw (restype, "retval");
1590 }
1591
910ad8de 1592 for (arg = a; arg; arg = DECL_CHAIN (arg))
6744a6ab
JH
1593 nargs++;
1594 vargs = VEC_alloc (tree, heap, nargs);
1595 if (this_adjusting)
1596 VEC_quick_push (tree, vargs,
1597 thunk_adjust (&bsi,
1598 a, 1, fixed_offset,
1599 virtual_offset));
1600 else
1601 VEC_quick_push (tree, vargs, a);
910ad8de 1602 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
6744a6ab
JH
1603 VEC_quick_push (tree, vargs, arg);
1604 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1605 VEC_free (tree, heap, vargs);
1606 gimple_call_set_cannot_inline (call, true);
1607 gimple_call_set_from_thunk (call, true);
1608 if (restmp)
1609 gimple_call_set_lhs (call, restmp);
1610 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1611 mark_symbols_for_renaming (call);
1612 find_referenced_vars_in (call);
1613 update_stmt (call);
1614
1615 if (restmp && !this_adjusting)
1616 {
1124098b 1617 tree true_label = NULL_TREE;
6744a6ab
JH
1618
1619 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1620 {
1621 gimple stmt;
1622 /* If the return type is a pointer, we need to
1623 protect against NULL. We know there will be an
1624 adjustment, because that's why we're emitting a
1625 thunk. */
1626 then_bb = create_basic_block (NULL, (void *) 0, bb);
1627 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1628 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1629 remove_edge (single_succ_edge (bb));
1630 true_label = gimple_block_label (then_bb);
6744a6ab 1631 stmt = gimple_build_cond (NE_EXPR, restmp,
e8160c9a 1632 build_zero_cst (TREE_TYPE (restmp)),
6744a6ab
JH
1633 NULL_TREE, NULL_TREE);
1634 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1635 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1636 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1637 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1638 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1639 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1640 bsi = gsi_last_bb (then_bb);
1641 }
1642
1643 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1644 fixed_offset, virtual_offset);
1645 if (true_label)
1646 {
1647 gimple stmt;
1648 bsi = gsi_last_bb (else_bb);
e8160c9a
NF
1649 stmt = gimple_build_assign (restmp,
1650 build_zero_cst (TREE_TYPE (restmp)));
6744a6ab
JH
1651 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1652 bsi = gsi_last_bb (return_bb);
1653 }
1654 }
1655 else
1656 gimple_call_set_tail (call, true);
1657
1658 /* Build return value. */
1659 ret = gimple_build_return (restmp);
1660 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1661
1662 delete_unreachable_blocks ();
1663 update_ssa (TODO_update_ssa);
1664
6744a6ab
JH
 1665 /* The thunk now has a real GIMPLE body; stop treating NODE as a thunk
 1666 and hand the new function over to the callgraph for compilation. */
c47d0034
JH
1667 node->thunk.thunk_p = false;
1668 cgraph_node_remove_callees (node);
6744a6ab
JH
1669 cgraph_add_new_function (thunk_fndecl, true);
1670 bitmap_obstack_release (NULL);
1671 }
1672 current_function_decl = NULL;
1673}
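
/* Illustrative note (not part of the original sources): for a C++ hierarchy
   such as

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { void g (); };

   calling g() through a B* that points into a C object goes through a thunk
   which adjusts THIS by the offset of the B subobject within C before
   transferring control to C::g -- the FIXED_OFFSET/VIRTUAL_VALUE adjustment
   assembled above. */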
1674
c47d0034 1675
39e2db00
JH
1676
 1677/* Assemble thunks and aliases associated with NODE. */
c47d0034
JH
1678
1679static void
39e2db00 1680assemble_thunks_and_aliases (struct cgraph_node *node)
c47d0034
JH
1681{
1682 struct cgraph_edge *e;
39e2db00
JH
1683 int i;
1684 struct ipa_ref *ref;
1685
c47d0034
JH
1686 for (e = node->callers; e;)
1687 if (e->caller->thunk.thunk_p)
1688 {
1689 struct cgraph_node *thunk = e->caller;
1690
1691 e = e->next_caller;
39e2db00 1692 assemble_thunks_and_aliases (thunk);
c47d0034
JH
1693 assemble_thunk (thunk);
1694 }
1695 else
1696 e = e->next_caller;
39e2db00
JH
1697 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1698 if (ref->use == IPA_REF_ALIAS)
1699 {
1700 struct cgraph_node *alias = ipa_ref_refering_node (ref);
1701 assemble_alias (alias->decl,
1702 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1703 assemble_thunks_and_aliases (alias);
1704 }
c47d0034
JH
1705}
1706
1c4a429a 1707/* Expand function specified by NODE. */
7660e67e 1708
1c4a429a 1709static void
db0e878d 1710cgraph_expand_function (struct cgraph_node *node)
1c4a429a
JH
1711{
1712 tree decl = node->decl;
1713
18c6ada9 1714 /* We ought to not compile any inline clones. */
341c100f 1715 gcc_assert (!node->global.inlined_to);
18c6ada9 1716
7e8b322a 1717 announce_function (decl);
257eb6e3 1718 node->process = 0;
39e2db00 1719 assemble_thunks_and_aliases (node);
5806d9ac
JH
1720 gcc_assert (node->lowered);
1721
1722 /* Generate RTL for the body of DECL. */
1723 tree_rest_of_compilation (decl);
1724
 1725 /* Make sure that the back end didn't give up on compiling. */
1726 gcc_assert (TREE_ASM_WRITTEN (decl));
1727 current_function_decl = NULL;
85ad2ef5 1728 gcc_assert (!cgraph_preserve_function_body_p (node));
39ecc018
JH
1729 cgraph_release_function_body (node);
 1730 /* Eliminate all call edges. This is important so the GIMPLE_CALL statements
 1731 no longer point to the dead function body. */
1732 cgraph_node_remove_callees (node);
6b02a499
JH
1733
1734 cgraph_function_flags_ready = true;
1c4a429a
JH
1735}
1736
18c6ada9 1737/* Return true when the call on edge E can be inlined; set *REASON to the recorded inline_failed code. */
b58b1157
JH
1738
1739bool
61a05df1 1740cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
b58b1157 1741{
18c6ada9
JH
1742 *reason = e->inline_failed;
1743 return !e->inline_failed;
b58b1157 1744}
18c6ada9 1745
6674a6ce 1746
6674a6ce 1747
db0e878d
AJ
1748/* Expand all functions that must be output.
1749
b58b1157
JH
 1750 Attempt to topologically sort the nodes so a function is output when
1751 all called functions are already assembled to allow data to be
a98ebe2e 1752 propagated across the callgraph. Use a stack to get a smaller distance
d1a6adeb 1753 between a function and its callees (later we may choose to use a more
b58b1157
JH
1754 sophisticated algorithm for function reordering; we will likely want
1755 to use subsections to make the output functions appear in top-down
1756 order). */
1757
1758static void
a20af5b8 1759cgraph_expand_all_functions (void)
b58b1157
JH
1760{
1761 struct cgraph_node *node;
5ed6ace5 1762 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
f30cfcb1 1763 int order_pos, new_order_pos = 0;
b58b1157
JH
1764 int i;
1765
af8bca3c 1766 order_pos = ipa_reverse_postorder (order);
341c100f 1767 gcc_assert (order_pos == cgraph_n_nodes);
b58b1157 1768
1ae58c30 1769 /* Garbage collector may remove inline clones we eliminate during
18c6ada9
JH
1770 optimization. So we must be sure to not reference them. */
1771 for (i = 0; i < order_pos; i++)
257eb6e3 1772 if (order[i]->process)
18c6ada9
JH
1773 order[new_order_pos++] = order[i];
1774
1775 for (i = new_order_pos - 1; i >= 0; i--)
b58b1157
JH
1776 {
1777 node = order[i];
257eb6e3 1778 if (node->process)
b58b1157 1779 {
341c100f 1780 gcc_assert (node->reachable);
257eb6e3 1781 node->process = 0;
b58b1157
JH
1782 cgraph_expand_function (node);
1783 }
1784 }
f45e0ad1 1785 cgraph_process_new_functions ();
50674e96 1786
b58b1157 1787 free (order);
50674e96 1788
b58b1157
JH
1789}
1790
474eccc6
ILT
1791/* This is used to sort the node types by the cgraph order number. */
1792
24b97832
ILT
1793enum cgraph_order_sort_kind
1794{
1795 ORDER_UNDEFINED = 0,
1796 ORDER_FUNCTION,
1797 ORDER_VAR,
1798 ORDER_ASM
1799};
1800
474eccc6
ILT
1801struct cgraph_order_sort
1802{
24b97832 1803 enum cgraph_order_sort_kind kind;
474eccc6
ILT
1804 union
1805 {
1806 struct cgraph_node *f;
8a4a83ed 1807 struct varpool_node *v;
474eccc6
ILT
1808 struct cgraph_asm_node *a;
1809 } u;
1810};
1811
1812/* Output all functions, variables, and asm statements in the order
1813 according to their order fields, which is the order in which they
1814 appeared in the file. This implements -fno-toplevel-reorder. In
1815 this mode we may output functions and variables which don't really
1816 need to be output. */
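
/* For example (illustrative only): a unit containing

     static int counter;
     asm ("# configuration marker");
     void bump (void) { counter++; }

   is emitted as the variable, then the asm statement, then the function --
   their source order -- instead of the default grouping used below in
   cgraph_optimize (pending asms, then functions, then variables). */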
1817
1818static void
1819cgraph_output_in_order (void)
1820{
1821 int max;
474eccc6
ILT
1822 struct cgraph_order_sort *nodes;
1823 int i;
1824 struct cgraph_node *pf;
8a4a83ed 1825 struct varpool_node *pv;
474eccc6
ILT
1826 struct cgraph_asm_node *pa;
1827
1828 max = cgraph_order;
33283dad 1829 nodes = XCNEWVEC (struct cgraph_order_sort, max);
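 /* Each cgraph, varpool and asm node remembers the ORDER number it was
 assigned when it was created; MAX is the current value of that counter,
 so the ORDER field can be used directly as an index into NODES. */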
474eccc6 1830
8a4a83ed 1831 varpool_analyze_pending_decls ();
474eccc6
ILT
1832
1833 for (pf = cgraph_nodes; pf; pf = pf->next)
1834 {
39e2db00 1835 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
474eccc6
ILT
1836 {
1837 i = pf->order;
1838 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1839 nodes[i].kind = ORDER_FUNCTION;
1840 nodes[i].u.f = pf;
1841 }
1842 }
1843
8a4a83ed 1844 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
474eccc6
ILT
1845 {
1846 i = pv->order;
1847 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1848 nodes[i].kind = ORDER_VAR;
1849 nodes[i].u.v = pv;
1850 }
1851
1852 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1853 {
1854 i = pa->order;
1855 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1856 nodes[i].kind = ORDER_ASM;
1857 nodes[i].u.a = pa;
1858 }
474eccc6 1859
7386e3ee
JH
 1860 /* With -fno-toplevel-reorder we output all statics; mark them as needed. */
1861 for (i = 0; i < max; ++i)
1862 {
1863 if (nodes[i].kind == ORDER_VAR)
1864 {
1865 varpool_mark_needed_node (nodes[i].u.v);
1866 }
1867 }
1868 varpool_empty_needed_queue ();
1869
7fece979
JJ
1870 for (i = 0; i < max; ++i)
1871 if (nodes[i].kind == ORDER_VAR)
1872 varpool_finalize_named_section_flags (nodes[i].u.v);
1873
474eccc6
ILT
1874 for (i = 0; i < max; ++i)
1875 {
1876 switch (nodes[i].kind)
1877 {
1878 case ORDER_FUNCTION:
257eb6e3 1879 nodes[i].u.f->process = 0;
474eccc6
ILT
1880 cgraph_expand_function (nodes[i].u.f);
1881 break;
1882
1883 case ORDER_VAR:
8a4a83ed 1884 varpool_assemble_decl (nodes[i].u.v);
474eccc6
ILT
1885 break;
1886
1887 case ORDER_ASM:
1888 assemble_asm (nodes[i].u.a->asm_str);
1889 break;
1890
1891 case ORDER_UNDEFINED:
1892 break;
1893
1894 default:
1895 gcc_unreachable ();
1896 }
1897 }
e7b9eb2c
ILT
1898
1899 cgraph_asm_nodes = NULL;
33283dad 1900 free (nodes);
474eccc6
ILT
1901}
1902
18c6ada9
JH
 1903/* Return true when the function body of NODE still needs to be kept around
1904 for later re-use. */
1905bool
85ad2ef5 1906cgraph_preserve_function_body_p (struct cgraph_node *node)
18c6ada9 1907{
c37f4ba4 1908 gcc_assert (cgraph_global_info_ready);
39e2db00 1909 gcc_assert (!node->alias && !node->thunk.thunk_p);
85ad2ef5 1910
18c6ada9 1911 /* Look if there is any clone around. */
9187e02d
JH
1912 if (node->clones)
1913 return true;
18c6ada9
JH
1914 return false;
1915}
1916
ef330312
PB
1917static void
1918ipa_passes (void)
1919{
db2960f4 1920 set_cfun (NULL);
04b201a2 1921 current_function_decl = NULL;
726a989a 1922 gimple_register_cfg_hooks ();
ef330312 1923 bitmap_obstack_initialize (NULL);
b20996ff 1924
090fa0ab
GF
1925 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1926
b20996ff 1927 if (!in_lto_p)
0430f80c
RG
1928 {
1929 execute_ipa_pass_list (all_small_ipa_passes);
1930 if (seen_error ())
1931 return;
1932 }
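 /* When reading LTO bytecode (in_lto_p) the small IPA passes already ran
 during the compile stage, so they are not repeated here. */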
3baf459d 1933
d7f09764
DN
1934 /* If pass_all_early_optimizations was not scheduled, the state of
1935 the cgraph will not be properly updated. Update it now. */
1936 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1937 cgraph_state = CGRAPH_STATE_IPA_SSA;
3baf459d 1938
d7f09764
DN
1939 if (!in_lto_p)
1940 {
1941 /* Generate coverage variables and constructors. */
1942 coverage_finish ();
1943
1944 /* Process new functions added. */
1945 set_cfun (NULL);
1946 current_function_decl = NULL;
1947 cgraph_process_new_functions ();
d7f09764 1948
090fa0ab
GF
1949 execute_ipa_summary_passes
1950 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
fb3f88cc 1951 }
c082f9f3
SB
1952
1953 /* Some targets need to handle LTO assembler output specially. */
1954 if (flag_generate_lto)
1955 targetm.asm_out.lto_start ();
1956
d7f09764
DN
1957 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1958
1959 if (!in_lto_p)
1960 ipa_write_summaries ();
1961
c082f9f3
SB
1962 if (flag_generate_lto)
1963 targetm.asm_out.lto_end ();
1964
fb3f88cc
JH
1965 if (!flag_ltrans)
1966 execute_ipa_pass_list (all_regular_ipa_passes);
090fa0ab 1967 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
3baf459d 1968
ef330312
PB
1969 bitmap_obstack_release (NULL);
1970}
1971
4537ec0c 1972
1c4a429a
JH
1973/* Perform simple optimizations based on callgraph. */
1974
d7f09764 1975void
db0e878d 1976cgraph_optimize (void)
1c4a429a 1977{
1da2ed5f 1978 if (seen_error ())
413803d3
VR
1979 return;
1980
18c6ada9
JH
1981#ifdef ENABLE_CHECKING
1982 verify_cgraph ();
1983#endif
7be82279 1984
cd9c7bd2
JH
 1985 /* The front-end may output common variables after the unit has been finalized.
 1986 It is safe to deal with them here as they are always zero-initialized. */
8a4a83ed 1987 varpool_analyze_pending_decls ();
857e7259 1988
a194aa56 1989 timevar_push (TV_CGRAPHOPT);
a5573239
JH
1990 if (pre_ipa_mem_report)
1991 {
1992 fprintf (stderr, "Memory consumption before IPA\n");
1993 dump_memory_report (false);
1994 }
b58b1157 1995 if (!quiet_flag)
a418679d 1996 fprintf (stderr, "Performing interprocedural optimizations\n");
f45e0ad1 1997 cgraph_state = CGRAPH_STATE_IPA;
f30cfcb1 1998
7e2fe9d8 1999 /* Don't run the IPA passes if there were any errors or sorry messages. */
1da2ed5f 2000 if (!seen_error ())
7e2fe9d8
AP
2001 ipa_passes ();
2002
4537ec0c 2003 /* Do nothing else if any IPA pass found errors. */
1da2ed5f 2004 if (seen_error ())
9ba0399e
RH
2005 {
2006 timevar_pop (TV_CGRAPHOPT);
2007 return;
2008 }
4537ec0c 2009
6b02a499
JH
 2010 /* This pass removes bodies of extern inline functions we never inlined.
2011 Do this later so other IPA passes see what is really going on. */
2012 cgraph_remove_unreachable_nodes (false, dump_file);
dafc5b82 2013 cgraph_global_info_ready = true;
a194aa56
JH
2014 if (cgraph_dump_file)
2015 {
7d82fe7c 2016 fprintf (cgraph_dump_file, "Optimized ");
a194aa56 2017 dump_cgraph (cgraph_dump_file);
cd9c7bd2 2018 dump_varpool (cgraph_dump_file);
a194aa56 2019 }
a5573239
JH
2020 if (post_ipa_mem_report)
2021 {
7fa982e5 2022 fprintf (stderr, "Memory consumption after IPA\n");
a5573239
JH
2023 dump_memory_report (false);
2024 }
a194aa56 2025 timevar_pop (TV_CGRAPHOPT);
1c4a429a 2026
b58b1157 2027 /* Output everything. */
3df9609a 2028 (*debug_hooks->assembly_start) ();
7d82fe7c
KC
2029 if (!quiet_flag)
2030 fprintf (stderr, "Assembling functions:\n");
18c6ada9
JH
2031#ifdef ENABLE_CHECKING
2032 verify_cgraph ();
2033#endif
474eccc6 2034
9187e02d 2035 cgraph_materialize_all_clones ();
6674a6ce 2036 cgraph_mark_functions_to_output ();
cd9c7bd2 2037
f45e0ad1 2038 cgraph_state = CGRAPH_STATE_EXPANSION;
474eccc6
ILT
2039 if (!flag_toplevel_reorder)
2040 cgraph_output_in_order ();
2041 else
2042 {
2043 cgraph_output_pending_asms ();
2044
2045 cgraph_expand_all_functions ();
8a4a83ed 2046 varpool_remove_unreferenced_decls ();
474eccc6 2047
8a4a83ed 2048 varpool_assemble_pending_decls ();
474eccc6 2049 }
f45e0ad1
JH
2050 cgraph_process_new_functions ();
2051 cgraph_state = CGRAPH_STATE_FINISHED;
cd9c7bd2 2052
a194aa56
JH
2053 if (cgraph_dump_file)
2054 {
7d82fe7c 2055 fprintf (cgraph_dump_file, "\nFinal ");
a194aa56 2056 dump_cgraph (cgraph_dump_file);
df7705b1 2057 dump_varpool (cgraph_dump_file);
a194aa56 2058 }
18c6ada9
JH
2059#ifdef ENABLE_CHECKING
2060 verify_cgraph ();
6de9cd9a
DN
2061 /* Double check that all inline clones are gone and that all
2062 function bodies have been released from memory. */
1da2ed5f 2063 if (!seen_error ())
6de9cd9a
DN
2064 {
2065 struct cgraph_node *node;
2066 bool error_found = false;
2067
2068 for (node = cgraph_nodes; node; node = node->next)
2069 if (node->analyzed
2070 && (node->global.inlined_to
39ecc018 2071 || gimple_has_body_p (node->decl)))
6de9cd9a
DN
2072 {
2073 error_found = true;
2074 dump_cgraph_node (stderr, node);
c22cacf3 2075 }
6de9cd9a 2076 if (error_found)
f30cfcb1 2077 internal_error ("nodes with unreleased memory found");
6de9cd9a 2078 }
18c6ada9 2079#endif
1c4a429a 2080}
4537ec0c 2081
9b3e897d
PB
2082void
2083init_cgraph (void)
2084{
a05541a9
JH
2085 if (!cgraph_dump_file)
2086 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
9b3e897d 2087}
57fb5341 2088
c22cacf3 2089/* The edges representing the callers of the NEW_VERSION node were
57fb5341
RL
 2090 fixed by cgraph_function_versioning (); now the call_expr in their
2091 respective tree code should be updated to call the NEW_VERSION. */
2092
2093static void
2094update_call_expr (struct cgraph_node *new_version)
2095{
2096 struct cgraph_edge *e;
2097
2098 gcc_assert (new_version);
726a989a
RB
2099
2100 /* Update the call expr on the edges to call the new version. */
57fb5341 2101 for (e = new_version->callers; e; e = e->next_caller)
c0ab1df3
AP
2102 {
2103 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2104 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1d65f45c 2105 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
c0ab1df3 2106 }
57fb5341
RL
2107}
2108
2109
2110/* Create a new cgraph node which is the new version of
 2111 OLD_VERSION node. REDIRECT_CALLERS holds the caller
 2112 edges which should be redirected to point to
 2113 NEW_VERSION. All the callee edges of OLD_VERSION
 2114 are cloned to the new version node. Return the new
91382288
JH
2115 version node.
2116
 2117 If non-NULL, BBS_TO_COPY determines which basic blocks
 2118 are copied, to prevent duplication of calls that are dead
 2119 in the clone. */
57fb5341
RL
2120
2121static struct cgraph_node *
2122cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
b2c0ad40 2123 tree new_decl,
91382288
JH
2124 VEC(cgraph_edge_p,heap) *redirect_callers,
2125 bitmap bbs_to_copy)
2126 {
57fb5341 2127 struct cgraph_node *new_version;
ae2b0888 2128 struct cgraph_edge *e;
57fb5341
RL
2129 unsigned i;
2130
2131 gcc_assert (old_version);
c22cacf3 2132
a358e188 2133 new_version = cgraph_create_node (new_decl);
57fb5341
RL
2134
2135 new_version->analyzed = true;
2136 new_version->local = old_version->local;
036546e5
JH
2137 new_version->local.externally_visible = false;
2138 new_version->local.local = true;
57fb5341 2139 new_version->global = old_version->global;
8cf9feca 2140 new_version->rtl = old_version->rtl;
57fb5341
RL
2141 new_version->reachable = true;
2142 new_version->count = old_version->count;
2143
036546e5 2144 for (e = old_version->callees; e; e=e->next_callee)
91382288
JH
2145 if (!bbs_to_copy
2146 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2147 cgraph_clone_edge (e, new_version, e->call_stmt,
2148 e->lto_stmt_uid, REG_BR_PROB_BASE,
2149 CGRAPH_FREQ_BASE,
898b8927 2150 true);
036546e5 2151 for (e = old_version->indirect_calls; e; e=e->next_callee)
91382288
JH
2152 if (!bbs_to_copy
2153 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2154 cgraph_clone_edge (e, new_version, e->call_stmt,
2155 e->lto_stmt_uid, REG_BR_PROB_BASE,
2156 CGRAPH_FREQ_BASE,
898b8927 2157 true);
ac47786e 2158 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
b2c0ad40
KH
2159 {
2160 /* Redirect calls to the old version node to point to its new
2161 version. */
2162 cgraph_redirect_edge_callee (e, new_version);
2163 }
57fb5341
RL
2164
2165 return new_version;
2166 }
2167
2168 /* Perform function versioning.
c22cacf3 2169 Function versioning includes copying of the tree and
57fb5341
RL
2170 a callgraph update (creating a new cgraph node and updating
2171 its callees and callers).
2172
2173 REDIRECT_CALLERS varray includes the edges to be redirected
2174 to the new version.
2175
2176 TREE_MAP is a mapping of tree nodes we want to replace with
2177 new ones (according to results of prior analysis).
2178 OLD_VERSION_NODE is the node that is versioned.
b8698a0f 2179 It returns the new version's cgraph node.
91382288
JH
 2180 If non-NULL, ARGS_TO_SKIP determines which function parameters
 2181 to remove from the new version.
 2182 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
 2183 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone. */
57fb5341
RL
2184
2185struct cgraph_node *
2186cgraph_function_versioning (struct cgraph_node *old_version_node,
b2c0ad40 2187 VEC(cgraph_edge_p,heap) *redirect_callers,
9187e02d 2188 VEC (ipa_replace_map_p,gc)* tree_map,
036546e5 2189 bitmap args_to_skip,
91382288
JH
2190 bitmap bbs_to_copy,
2191 basic_block new_entry_block,
036546e5 2192 const char *clone_name)
57fb5341
RL
2193{
2194 tree old_decl = old_version_node->decl;
2195 struct cgraph_node *new_version_node = NULL;
2196 tree new_decl;
2197
2198 if (!tree_versionable_function_p (old_decl))
2199 return NULL;
2200
61e03ffc
JH
2201 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2202
57fb5341
RL
2203 /* Make a new FUNCTION_DECL tree node for the
2204 new version. */
c6f7cfc1
JH
2205 if (!args_to_skip)
2206 new_decl = copy_node (old_decl);
2207 else
2208 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
57fb5341 2209
9990e02a
JH
2210 /* Generate a new name for the new version. */
2211 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2212 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2213 SET_DECL_RTL (new_decl, NULL);
2214
57fb5341
RL
2215 /* Create the new version's call-graph node.
2216 and update the edges of the new node. */
2217 new_version_node =
2218 cgraph_copy_node_for_versioning (old_version_node, new_decl,
91382288 2219 redirect_callers, bbs_to_copy);
57fb5341
RL
2220
2221 /* Copy the OLD_VERSION_NODE function tree to the new version. */
91382288
JH
2222 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2223 bbs_to_copy, new_entry_block);
57fb5341 2224
c22cacf3 2225 /* Update the new version's properties.
c0ab1df3
AP
 2226 Make the new version visible only within this translation unit. Make sure
 2227 that it is not weak either.
c22cacf3 2228 ??? We cannot use COMDAT linkage because there is no
57fb5341 2229 ABI support for this. */
715a4e08 2230 cgraph_make_decl_local (new_version_node->decl);
e6e1c050 2231 DECL_VIRTUAL_P (new_version_node->decl) = 0;
57fb5341
RL
2232 new_version_node->local.externally_visible = 0;
2233 new_version_node->local.local = 1;
2234 new_version_node->lowered = true;
e6e1c050 2235
c0ab1df3
AP
2236 /* Update the call_expr on the edges to call the new version node. */
2237 update_call_expr (new_version_node);
b8698a0f 2238
129a37fc 2239 cgraph_call_function_insertion_hooks (new_version_node);
57fb5341
RL
2240 return new_version_node;
2241}
ea99e0be 2242
9187e02d
JH
 2243/* Given a virtual clone, turn it into an actual clone. */
2244static void
2245cgraph_materialize_clone (struct cgraph_node *node)
2246{
2247 bitmap_obstack_initialize (NULL);
e466e2ce
JH
2248 node->former_clone_of = node->clone_of->decl;
2249 if (node->clone_of->former_clone_of)
2250 node->former_clone_of = node->clone_of->former_clone_of;
9187e02d
JH
2251 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2252 tree_function_versioning (node->clone_of->decl, node->decl,
2253 node->clone.tree_map, true,
91382288 2254 node->clone.args_to_skip, NULL, NULL);
08ad1d6d
JH
2255 if (cgraph_dump_file)
2256 {
2257 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2258 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2259 }
9187e02d
JH
2260
2261 /* Function is no longer clone. */
2262 if (node->next_sibling_clone)
2263 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2264 if (node->prev_sibling_clone)
2265 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2266 else
2267 node->clone_of->clones = node->next_sibling_clone;
2268 node->next_sibling_clone = NULL;
2269 node->prev_sibling_clone = NULL;
0e3776db 2270 if (!node->clone_of->analyzed && !node->clone_of->clones)
f0c418dc
JH
2271 {
2272 cgraph_release_function_body (node->clone_of);
2273 cgraph_node_remove_callees (node->clone_of);
2274 ipa_remove_all_references (&node->clone_of->ref_list);
2275 }
9187e02d
JH
2276 node->clone_of = NULL;
2277 bitmap_obstack_release (NULL);
2278}
2279
8132a837
MJ
2280/* If necessary, change the function declaration in the call statement
2281 associated with E so that it corresponds to the edge callee. */
2282
2283gimple
2284cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2285{
2286 tree decl = gimple_call_fndecl (e->call_stmt);
2287 gimple new_stmt;
ceeffab0
MJ
2288 gimple_stmt_iterator gsi;
2289 bool gsi_computed = false;
437ffe7b
JH
2290#ifdef ENABLE_CHECKING
2291 struct cgraph_node *node;
2292#endif
8132a837 2293
3949c4a7
MJ
2294 if (e->indirect_unknown_callee
2295 || decl == e->callee->decl
8132a837 2296 /* Don't update call from same body alias to the real function. */
3949c4a7 2297 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
8132a837
MJ
2298 return e->call_stmt;
2299
437ffe7b 2300#ifdef ENABLE_CHECKING
3949c4a7
MJ
2301 if (decl)
2302 {
2303 node = cgraph_get_node (decl);
2304 gcc_assert (!node || !node->clone.combined_args_to_skip);
2305 }
437ffe7b 2306#endif
e466e2ce 2307
8132a837
MJ
2308 if (cgraph_dump_file)
2309 {
2310 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2311 cgraph_node_name (e->caller), e->caller->uid,
2312 cgraph_node_name (e->callee), e->callee->uid);
2313 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e466e2ce 2314 if (e->callee->clone.combined_args_to_skip)
8d2adc24
EB
2315 {
2316 fprintf (cgraph_dump_file, " combined args to skip: ");
2317 dump_bitmap (cgraph_dump_file,
2318 e->callee->clone.combined_args_to_skip);
e466e2ce 2319 }
8132a837
MJ
2320 }
2321
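 /* If indirect-call analysis determined that the real target sits behind a
 this-adjusting thunk, THUNK_DELTA records the required THIS adjustment;
 apply it here unless the clone drops the THIS argument entirely
 (bit 0 of combined_args_to_skip). */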
ce47fda3
MJ
 2322 if (e->indirect_info
 2323 && e->indirect_info->thunk_delta != 0
ceeffab0
MJ
2324 && (!e->callee->clone.combined_args_to_skip
2325 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2326 {
2327 if (cgraph_dump_file)
ce47fda3
MJ
2328 fprintf (cgraph_dump_file, " Thunk delta is "
2329 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
ceeffab0
MJ
2330 gsi = gsi_for_stmt (e->call_stmt);
2331 gsi_computed = true;
ce47fda3
MJ
2332 gimple_adjust_this_by_delta (&gsi,
2333 build_int_cst (sizetype,
2334 e->indirect_info->thunk_delta));
2335 e->indirect_info->thunk_delta = 0;
ceeffab0
MJ
2336 }
2337
8132a837 2338 if (e->callee->clone.combined_args_to_skip)
8d2adc24 2339 {
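 /* The clone does not accept all of the original arguments; build a new
 call statement that omits the skipped ones and retarget it to the
 clone's declaration. */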
1b7d2dd1 2340 int lp_nr;
8d2adc24
EB
2341
2342 new_stmt
2343 = gimple_call_copy_skip_args (e->call_stmt,
2344 e->callee->clone.combined_args_to_skip);
3d113394 2345 gimple_call_set_fndecl (new_stmt, e->callee->decl);
8d2adc24
EB
2346
2347 if (gimple_vdef (new_stmt)
2348 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2349 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2350
ceeffab0
MJ
2351 if (!gsi_computed)
2352 gsi = gsi_for_stmt (e->call_stmt);
72351fa3 2353 gsi_replace (&gsi, new_stmt, false);
1b7d2dd1
RG
2354 /* We need to defer cleaning EH info on the new statement to
2355 fixup-cfg. We may not have dominator information at this point
2356 and thus would end up with unreachable blocks and have no way
2357 to communicate that we need to run CFG cleanup then. */
2358 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2359 if (lp_nr != 0)
2360 {
2361 remove_stmt_from_eh_lp (e->call_stmt);
2362 add_stmt_to_eh_lp (new_stmt, lp_nr);
2363 }
8d2adc24 2364 }
8132a837 2365 else
3d113394
RG
2366 {
2367 new_stmt = e->call_stmt;
2368 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2369 update_stmt (new_stmt);
2370 }
8132a837 2371
8132a837
MJ
2372 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2373
2374 if (cgraph_dump_file)
2375 {
2376 fprintf (cgraph_dump_file, " updated to:");
2377 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2378 }
2379 return new_stmt;
2380}
2381
9187e02d 2382/* Once all functions from the compilation unit are in memory, produce all clones
8132a837
MJ
 2383 and update all calls. We might also do this on demand if we don't want to
 2384 bring all functions into memory prior to compilation, but the current WHOPR
 2385 implementation does that and it is a bit easier to keep everything right in
2386 this order. */
9187e02d
JH
2387void
2388cgraph_materialize_all_clones (void)
2389{
2390 struct cgraph_node *node;
2391 bool stabilized = false;
2392
2393 if (cgraph_dump_file)
2394 fprintf (cgraph_dump_file, "Materializing clones\n");
2395#ifdef ENABLE_CHECKING
2396 verify_cgraph ();
2397#endif
2398
 2399 /* We could also use topological order, but the number of iterations should
 2400 be bounded by the number of IPA passes, since a single IPA pass is probably
 2401 not going to create clones of clones it created itself. */
2402 while (!stabilized)
2403 {
2404 stabilized = true;
2405 for (node = cgraph_nodes; node; node = node->next)
2406 {
2407 if (node->clone_of && node->decl != node->clone_of->decl
2408 && !gimple_has_body_p (node->decl))
2409 {
2410 if (gimple_has_body_p (node->clone_of->decl))
2411 {
2412 if (cgraph_dump_file)
08ad1d6d 2413 {
61502ca8 2414 fprintf (cgraph_dump_file, "cloning %s to %s\n",
08ad1d6d
JH
2415 cgraph_node_name (node->clone_of),
2416 cgraph_node_name (node));
2417 if (node->clone.tree_map)
2418 {
2419 unsigned int i;
2420 fprintf (cgraph_dump_file, " replace map: ");
2421 for (i = 0; i < VEC_length (ipa_replace_map_p,
2422 node->clone.tree_map);
2423 i++)
2424 {
2425 struct ipa_replace_map *replace_info;
2426 replace_info = VEC_index (ipa_replace_map_p,
2427 node->clone.tree_map,
2428 i);
2429 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2430 fprintf (cgraph_dump_file, " -> ");
2431 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2432 fprintf (cgraph_dump_file, "%s%s;",
2433 replace_info->replace_p ? "(replace)":"",
2434 replace_info->ref_p ? "(ref)":"");
2435 }
2436 fprintf (cgraph_dump_file, "\n");
2437 }
2438 if (node->clone.args_to_skip)
2439 {
2440 fprintf (cgraph_dump_file, " args_to_skip: ");
2441 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2442 }
 2443 if (node->clone.combined_args_to_skip)
2444 {
2445 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2446 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2447 }
2448 }
9187e02d 2449 cgraph_materialize_clone (node);
36576655 2450 stabilized = false;
9187e02d 2451 }
9187e02d
JH
2452 }
2453 }
2454 }
47cb0d7d
JH
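 /* Nodes without an analyzed body should not carry call edges at this
 point; drop any that remain after materialization. */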
2455 for (node = cgraph_nodes; node; node = node->next)
2456 if (!node->analyzed && node->callees)
2457 cgraph_node_remove_callees (node);
8132a837
MJ
2458 if (cgraph_dump_file)
2459 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
9a23acef
JH
2460#ifdef ENABLE_CHECKING
2461 verify_cgraph ();
2462#endif
9187e02d
JH
2463 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2464}
2465
7be82279 2466#include "gt-cgraphunit.h"