]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
2011-06-15 Richard Guenther <rguenther@suse.de>
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
cd6bca02 1/* Callgraph based interprocedural optimizations.
aed6e608 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that neither the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
851d9296 48 In the call-graph construction and local function analysis takes
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
b0cdf642 51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
6329636b 80 Analyzing of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We cannot deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
6329636b 106 where reference has been optimized out. */
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
a41f2a28 141#include "ipa-inline.h"
7771d558 142#include "ipa-utils.h"
a0605d65 143#include "lto-streamer.h"
d7c6d889 144
a6868229 145static void cgraph_expand_all_functions (void);
d9d9733a 146static void cgraph_mark_functions_to_output (void);
147static void cgraph_expand_function (struct cgraph_node *);
f788fff2 148static void cgraph_output_pending_asms (void);
25bb88de 149
ecb08119 150FILE *cgraph_dump_file;
121f3051 151
28454517 152/* Used for vtable lookup in thunk adjusting. */
153static GTY (()) tree vtable_entry_type;
154
2c0b522d 155/* Determine if function DECL is needed. That is, visible to something
156 either outside this translation unit, something magic in the system
6329636b 157 configury. */
2c0b522d 158
7bfefa9d 159bool
160cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 161{
3f82b628 162 /* If the user told us it is used, then it must be so. */
05806473 163 if (node->local.externally_visible)
164 return true;
165
3f82b628 166 /* ??? If the assembler name is set by hand, it is possible to assemble
167 the name later after finalizing the function and the fact is noticed
168 in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
c70f46b0 170 && (!node->thunk.thunk_p && !node->same_body_alias)
3f82b628 171 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
172 return true;
173
55680bef 174 /* With -fkeep-inline-functions we are keeping all inline functions except
175 for extern inline ones. */
176 if (flag_keep_inline_functions
177 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 178 && !DECL_EXTERNAL (decl)
cbd7f5a0 179 && !DECL_DISREGARD_INLINE_LIMITS (decl))
55680bef 180 return true;
181
2c0b522d 182 /* If we decided it was needed before, but at the time we didn't have
183 the body of the function available, then it's still needed. We have
184 to go back and re-check its dependencies now. */
185 if (node->needed)
186 return true;
187
188 /* Externally visible functions must be output. The exception is
a0c938f0 189 COMDAT functions that must be output only when they are needed.
8baa9d15 190
191 When not optimizing, also output the static functions. (see
95da6220 192 PR24561), but don't do so for always_inline functions, functions
0f9238c0 193 declared inline and nested functions. These were optimized out
d3d410e1 194 in the original implementation and it is unclear whether we want
554f2707 195 to change the behavior here. */
bba7ddf8 196 if (((TREE_PUBLIC (decl)
0f9238c0 197 || (!optimize
cbd7f5a0 198 && !DECL_DISREGARD_INLINE_LIMITS (decl)
d3d410e1 199 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 200 && !(DECL_CONTEXT (decl)
201 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 202 && !flag_whole_program
cbcf2791 203 && !flag_lto)
62eec3b4 204 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 205 return true;
206
2c0b522d 207 return false;
208}
209
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into the callgraph in a way so they look like ordinary reachable
   functions inserted into the callgraph already at construction time.

   What is done with each queued function depends on how far the compilation
   pipeline has progressed (cgraph_state).  Returns true if at least one
   function was added during the construction phase, i.e. new output may be
   required.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Pick up any variables queued alongside the new functions.  */
  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      /* Pop NODE off the queue before processing: processing may push
	 further nodes onto cgraph_new_nodes.  */
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize the function and
	     move it into the reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	       && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  else
	    compute_inline_parameters (node, true);
	  /* Dominance info computed by the passes above is local to this
	     function; drop it before moving on.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_call_function_insertion_hooks (node);
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Processing the function may have queued new variables too.  */
      varpool_analyze_pending_decls ();
    }
  return output;
}
284
9b8fb23a 285/* As an GCC extension we allow redefinition of the function. The
286 semantics when both copies of bodies differ is not well defined.
287 We replace the old body with new body so in unit at a time mode
288 we always use new body, while in normal mode we may end up with
289 old body inlined into some functions and new body expanded and
290 inlined in others.
291
292 ??? It may make more sense to use one body for inlining and other
293 body for expanding the function but this is difficult to do. */
294
295static void
296cgraph_reset_node (struct cgraph_node *node)
297{
09fc9532 298 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 299 This is *not* testing for whether we've already emitted the function.
300 That case can be sort-of legitimately seen with real function redefinition
301 errors. I would argue that the front end should never present us with
302 such a case, but don't enforce that for now. */
09fc9532 303 gcc_assert (!node->process);
9b8fb23a 304
305 /* Reset our data structures so we can analyze the function again. */
306 memset (&node->local, 0, sizeof (node->local));
307 memset (&node->global, 0, sizeof (node->global));
308 memset (&node->rtl, 0, sizeof (node->rtl));
309 node->analyzed = false;
9b8fb23a 310 node->local.finalized = false;
311
9b8fb23a 312 cgraph_node_remove_callees (node);
9b8fb23a 313}
c08871a9 314
1e8e9920 315static void
316cgraph_lower_function (struct cgraph_node *node)
317{
318 if (node->lowered)
319 return;
bfec3452 320
321 if (node->nested)
322 lower_nested_functions (node->decl);
323 gcc_assert (!node->nested);
324
1e8e9920 325 tree_lowering_passes (node->decl);
326 node->lowered = true;
327}
328
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  /* Seeing an already-finalized node means DECL was redefined (the GCC
     extern inline extension); reset it and remember the redefinition.  */
  if (node->local.finalized)
    {
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A body that already has a CFG (e.g. built by the front end) needs no
     further lowering.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by vtable from
	 other compilation unit.  Still we want to devirtualize calls
	 to those so we need to analyze them.
	 FIXME: We should introduce may edges for this purpose and update
	 their handling in unreachable function removal and inliner too.  */
      || (DECL_VIRTUAL_P (decl)
	  && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* GC is unsafe while a nested function's containing context is still
     being compiled; see the head comment.  */
  if (!nested)
    ggc_collect ();
}
378
0da03d11 379/* C99 extern inline keywords allow changing of declaration after function
380 has been finalized. We need to re-decide if we want to mark the function as
381 needed then. */
382
383void
384cgraph_mark_if_needed (tree decl)
385{
fd6a3c41 386 struct cgraph_node *node = cgraph_get_node (decl);
7bfefa9d 387 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 388 cgraph_mark_needed_node (node);
389}
390
ccf4ab6b 391/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
392static bool
393clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
394{
c70f46b0 395 node = cgraph_function_or_thunk_node (node, NULL);
396 node2 = cgraph_function_or_thunk_node (node2, NULL);
ccf4ab6b 397 while (node != node2 && node2)
398 node2 = node2->clone_of;
399 return node2 != NULL;
400}
401
1a036a3b 402/* Verify edge E count and frequency. */
403
404static bool
405verify_edge_count_and_frequency (struct cgraph_edge *e)
406{
407 bool error_found = false;
408 if (e->count < 0)
409 {
410 error ("caller edge count is negative");
411 error_found = true;
412 }
413 if (e->frequency < 0)
414 {
415 error ("caller edge frequency is negative");
416 error_found = true;
417 }
418 if (e->frequency > CGRAPH_FREQ_MAX)
419 {
420 error ("caller edge frequency is too large");
421 error_found = true;
422 }
423 if (gimple_has_body_p (e->caller->decl)
424 && !e->caller->global.inlined_to
8bae3ea4 425 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
426 Remove this once edges are actualy removed from the function at that time. */
427 && (e->frequency
428 || (inline_edge_summary_vec
429 && !inline_edge_summary (e)->predicate))
1a036a3b 430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
0a10fd82 434 error ("caller edge frequency %i does not match BB frequency %i",
1a036a3b 435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441}
442
7b29dd2f 443/* Switch to THIS_CFUN if needed and print STMT to stderr. */
444static void
445cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
446{
447 /* debug_gimple_stmt needs correct cfun */
448 if (cfun != this_cfun)
449 set_cfun (this_cfun);
450 debug_gimple_stmt (stmt);
451}
452
/* Verify the internal consistency of callgraph NODE: edge aux fields,
   profile data, inline-clone invariants, the clone and comdat lists,
   alias and thunk shape, and — when a gimple body is present — that the
   callgraph edges match the call statements in the body one-to-one.
   Aborts via internal_error if any inconsistency is found.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* The graph may legitimately be inconsistent after errors.  */
  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* Edge aux fields are scratch space; they must be clear on entry
     (the body-matching walk below uses them as "seen" marks).  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       identifier_to_locale (cgraph_node_name (e->caller)),
	       identifier_to_locale (cgraph_node_name (e->callee)));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("execution count is negative");
      error_found = true;
    }
  /* Inline clones exist only inside their inlined_to function; they may
     not be externally visible, address-taken, or needed on their own.  */
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
	{
	  error ("aux field set for indirect edge from %s",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  error_found = true;
	}
      if (!e->indirect_unknown_callee
	  || !e->indirect_info)
	{
	  error ("An indirect edge from %s is not marked as indirect or has "
		 "associated indirect_info, the corresponding statement is: ",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
	  error_found = true;
	}
    }
  /* Check incoming edges and the inlined_to invariants: an inlined edge's
     callee must record the root of the inline tree, and an inlined node
     has exactly one caller.  */
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
	error_found = true;
      if (!e->inline_failed)
	{
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_get_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* Clone-tree invariants: a clone must appear in its clone_of's clone
     list, and the sibling-clone list must be a consistent doubly linked
     list rooted at clone_of->clones.  */
  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
	if (n == node)
	  break;
      if (!n)
	{
	  error ("node has wrong clone_of");
	  error_found = true;
	}
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
	if (n->clone_of != node)
	  break;
      if (n)
	{
	  error ("node has wrong clone list");
	  error_found = true;
	}
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  /* same_comdat_group must be a circular list of at least two
     DECL_ONE_ONLY nodes.  */
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
	{
	  error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
	  error_found = true;
	}
      if (n == node)
	{
	  error ("node is alone in a comdat group");
	  error_found = true;
	}
      do
	{
	  if (!n->same_comdat_group)
	    {
	      error ("same_comdat_group is not a circular list");
	      error_found = true;
	      break;
	    }
	  n = n->same_comdat_group;
	}
      while (n != node);
    }

  /* An analyzed alias has no call edges and exactly one IPA_REF_ALIAS
     reference (to its target).  */
  if (node->analyzed && node->alias)
    {
      bool ref_found = false;
      int i;
      struct ipa_ref *ref;

      if (node->callees)
	{
	  error ("Alias has call edges");
	  error_found = true;
	}
      for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	if (ref->use != IPA_REF_ALIAS)
	  {
	    error ("Alias has non-alias refernece");
	    error_found = true;
	  }
	else if (ref_found)
	  {
	    error ("Alias has more than one alias reference");
	    error_found = true;
	  }
	else
	  ref_found = true;
      if (!ref_found)
	{
	  error ("Analyzed alias has no reference");
	  error_found = true;
	}
    }
  /* An analyzed thunk has exactly one outgoing edge (to the function it
     wraps) and no gimple body of its own.  */
  if (node->analyzed && node->thunk.thunk_p)
    {
      if (!node->callees)
	{
	  error ("No edge out of thunk node");
	  error_found = true;
	}
      else if (node->callees->next_callee)
	{
	  error ("More than one edge out of thunk node");
	  error_found = true;
	}
      if (gimple_has_body_p (node->decl))
	{
	  error ("Thunk is not supposed to have body");
	  error_found = true;
	}
    }
  else if (node->analyzed && gimple_has_body_p (node->decl)
	   && !TREE_ASM_WRITTEN (node->decl)
	   && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
	   && !flag_wpa)
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  Each call
	     statement must have exactly one edge; each matched edge is
	     marked via its aux field.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
		 !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		if (is_gimple_call (stmt))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    tree decl = gimple_call_fndecl (stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    cgraph_debug_gimple_stmt (this_cfun, stmt);
			    error_found = true;
			  }
			if (!e->indirect_unknown_callee)
			  {
			    /* A direct edge must lead to the declaration
			       the statement calls, or to one of its
			       clones/former clones.  */
			    if (!e->callee->global.inlined_to
				&& decl
				&& cgraph_get_node (decl)
				&& (e->callee->former_clone_of
				    != cgraph_get_node (decl)->decl)
				/* IPA-CP sometimes redirects an edge to a
				   clone and then back to the former
				   function.  This ping-pong has to go,
				   eventually.  */
				&& (cgraph_function_or_thunk_node (cgraph_get_node (decl), NULL)
				    != cgraph_function_or_thunk_node (e->callee, NULL))
				&& !clone_of_p (cgraph_get_node (decl),
						e->callee))
			      {
				error ("edge points to wrong declaration:");
				debug_tree (e->callee->decl);
				fprintf (stderr," Instead of:");
				debug_tree (decl);
				error_found = true;
			      }
			  }
			else if (decl)
			  {
			    error ("an indirect edge with unknown callee "
				   "corresponding to a call_stmt with "
				   "a known declaration:");
			    error_found = true;
			    cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
			  }
			/* Mark the edge as matched to a statement.  */
			e->aux = (void *)1;
		      }
		    else if (decl)
		      {
			error ("missing callgraph edge for call stmt:");
			cgraph_debug_gimple_stmt (this_cfun, stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Every edge must have been matched to a statement above; clear the
	 aux marks while checking.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)),
		     identifier_to_locale (cgraph_node_name (e->callee)));
	      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
      for (e = node->indirect_calls; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("an indirect edge from %s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)));
	      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}
779
780/* Verify whole cgraph structure. */
4b987fac 781DEBUG_FUNCTION void
b0cdf642 782verify_cgraph (void)
783{
784 struct cgraph_node *node;
785
852f689e 786 if (seen_error ())
8ec2a798 787 return;
788
b0cdf642 789 for (node = cgraph_nodes; node; node = node->next)
790 verify_cgraph_node (node);
791}
792
56af936e 793/* Output all asm statements we have stored up to be output. */
794
795static void
796cgraph_output_pending_asms (void)
797{
798 struct cgraph_asm_node *can;
799
852f689e 800 if (seen_error ())
56af936e 801 return;
802
803 for (can = cgraph_asm_nodes; can; can = can->next)
804 assemble_asm (can->asm_str);
805 cgraph_asm_nodes = NULL;
806}
807
/* Analyze the function scheduled to be output.  Handles three cases:
   aliases (record the alias reference and fix up visibility), thunks
   (create the single edge to the wrapped function), and ordinary
   functions (gimplify and lower the body).  Sets node->analyzed.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  /* current_function_decl is global state; save it so it can be restored
     on exit.  */
  tree save = current_function_decl;
  tree decl = node->decl;

  if (node->alias && node->thunk.alias)
    {
      struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
      /* Record the alias reference unless it was already recorded.  */
      if (!VEC_length (ipa_ref_t, node->ref_list.references))
	ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      /* Same-body aliases inherit the inline-related flags of their
	 target.  */
      if (node->same_body_alias)
	{
	  DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
	  DECL_DECLARED_INLINE_P (node->decl)
	    = DECL_DECLARED_INLINE_P (node->thunk.alias);
	  DECL_DISREGARD_INLINE_LIMITS (node->decl)
	    = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
	}

      /* Fixup visibility nonsense the C++ frontend produces on same-body
	 aliases: mirror the target's external/comdat properties and link
	 the alias into the target's comdat group.  */
      if (TREE_PUBLIC (node->decl) && node->same_body_alias)
	{
	  DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
	  if (DECL_ONE_ONLY (node->thunk.alias))
	    {
	      DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
	      DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
	      if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
		{
		  /* NB: this TGT shadows the outer one; both resolve to
		     the same node.  Insert NODE into the circular
		     same_comdat_group list of TGT.  */
		  struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
		  node->same_comdat_group = tgt;
		  if (!tgt->same_comdat_group)
		    tgt->same_comdat_group = node;
		  else
		    {
		      /* Find the predecessor of TGT in the circular list
			 and splice NODE in after it.  */
		      struct cgraph_node *n;
		      for (n = tgt->same_comdat_group;
			   n->same_comdat_group != tgt;
			   n = n->same_comdat_group)
			;
		      n->same_comdat_group = node;
		    }
		}
	    }
	}
      /* Whatever the alias stands for is reachable (and address-taken if
	 the alias is).  */
      cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
      if (node->address_taken)
	cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
      if (cgraph_decide_is_function_needed (node, node->decl))
	cgraph_mark_needed_node (node);
    }
  else if (node->thunk.thunk_p)
    {
      /* A thunk gets a single edge to the function it wraps.  */
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
    }
  else
    {
      current_function_decl = decl;
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_body (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      cgraph_lower_function (node);
      pop_cfun ();
    }
  node->analyzed = true;

  current_function_decl = save;
}
888
c70f46b0 889/* C++ frontend produce same body aliases all over the place, even before PCH
890 gets streamed out. It relies on us linking the aliases with their function
891 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
892 first produce aliases without links, but once C++ FE is sure he won't sream
893 PCH we build the links via this function. */
894
895void
896cgraph_process_same_body_aliases (void)
897{
898 struct cgraph_node *node;
899 for (node = cgraph_nodes; node; node = node->next)
900 if (node->same_body_alias
901 && !VEC_length (ipa_ref_t, node->ref_list.references))
902 {
903 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
904 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
905 }
906 same_body_aliases_done = true;
907}
908
d05db70d 909/* Process attributes common for vars and functions. */
910
911static void
912process_common_attributes (tree decl)
913{
914 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
915
916 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
917 {
918 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
919 "%<weakref%> attribute should be accompanied with"
920 " an %<alias%> attribute");
921 DECL_WEAK (decl) = 0;
40b32d93 922 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
923 DECL_ATTRIBUTES (decl));
d05db70d 924 }
925}
926
05806473 927/* Look for externally_visible and used attributes and mark cgraph nodes
928 accordingly.
929
930 We cannot mark the nodes at the point the attributes are processed (in
931 handle_*_attribute) because the copy of the declarations available at that
932 point may not be canonical. For example, in:
933
934 void f();
935 void f() __attribute__((used));
936
937 the declaration we see in handle_used_attribute will be the second
938 declaration -- but the front end will subsequently merge that declaration
939 with the original declaration and discard the second declaration.
940
941 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
942
943 void f() {}
944 void f() __attribute__((externally_visible));
945
946 is valid.
947
948 So, we walk the nodes at the end of the translation unit, applying the
949 attributes at that point. */
950
951static void
952process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 953 struct varpool_node *first_var)
05806473 954{
955 struct cgraph_node *node;
1d416bd7 956 struct varpool_node *vnode;
05806473 957
958 for (node = cgraph_nodes; node != first; node = node->next)
959 {
960 tree decl = node->decl;
83a23b05 961 if (DECL_PRESERVE_P (decl))
0b49f8f8 962 cgraph_mark_needed_node (node);
62433d51 963 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
964 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
965 && TREE_PUBLIC (node->decl))
966 {
967 if (node->local.finalized)
968 cgraph_mark_needed_node (node);
969 }
970 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 971 {
ba12ea31 972 if (! TREE_PUBLIC (node->decl))
712d2297 973 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
974 "%<externally_visible%>"
975 " attribute have effect only on public objects");
59dd4830 976 else if (node->local.finalized)
977 cgraph_mark_needed_node (node);
05806473 978 }
40b32d93 979 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
c70f46b0 980 && (node->local.finalized && !node->alias))
40b32d93 981 {
982 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
983 "%<weakref%> attribute ignored"
984 " because function is defined");
985 DECL_WEAK (decl) = 0;
986 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
987 DECL_ATTRIBUTES (decl));
988 }
d05db70d 989 process_common_attributes (decl);
05806473 990 }
1d416bd7 991 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 992 {
993 tree decl = vnode->decl;
83a23b05 994 if (DECL_PRESERVE_P (decl))
05806473 995 {
22671757 996 vnode->force_output = true;
05806473 997 if (vnode->finalized)
1d416bd7 998 varpool_mark_needed_node (vnode);
05806473 999 }
62433d51 1000 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1001 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 1002 && TREE_PUBLIC (vnode->decl))
62433d51 1003 {
1004 if (vnode->finalized)
1005 varpool_mark_needed_node (vnode);
1006 }
1007 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 1008 {
ba12ea31 1009 if (! TREE_PUBLIC (vnode->decl))
712d2297 1010 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1011 "%<externally_visible%>"
1012 " attribute have effect only on public objects");
59dd4830 1013 else if (vnode->finalized)
1014 varpool_mark_needed_node (vnode);
05806473 1015 }
40b32d93 1016 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1017 && vnode->finalized
1018 && DECL_INITIAL (decl))
1019 {
1020 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1021 "%<weakref%> attribute ignored"
1022 " because variable is initialized");
1023 DECL_WEAK (decl) = 0;
1024 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1025 DECL_ATTRIBUTES (decl));
1026 }
d05db70d 1027 process_common_attributes (decl);
05806473 1028 }
1029}
1030
aeeb194b 1031/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1032 each reachable functions) and build cgraph.
1033 The function can be called multiple times after inserting new nodes
0d424440 1034 into beginning of queue. Just the new part of queue is re-scanned then. */
ae01b312 1035
aeeb194b 1036static void
1037cgraph_analyze_functions (void)
ae01b312 1038{
c1dcd13c 1039 /* Keep track of already processed nodes when called multiple times for
06b27565 1040 intermodule optimization. */
c1dcd13c 1041 static struct cgraph_node *first_analyzed;
c17d0de1 1042 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1043 static struct varpool_node *first_analyzed_var;
aeeb194b 1044 struct cgraph_node *node, *next;
ae01b312 1045
f1c35659 1046 bitmap_obstack_initialize (NULL);
c17d0de1 1047 process_function_and_variable_attributes (first_processed,
1048 first_analyzed_var);
1049 first_processed = cgraph_nodes;
1d416bd7 1050 first_analyzed_var = varpool_nodes;
1051 varpool_analyze_pending_decls ();
f79b6507 1052 if (cgraph_dump_file)
ae01b312 1053 {
e4200070 1054 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1055 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1056 if (node->needed)
f79b6507 1057 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1058 fprintf (cgraph_dump_file, "\n");
ae01b312 1059 }
aeeb194b 1060 cgraph_process_new_functions ();
ae01b312 1061
e6d2b2d8 1062 /* Propagate reachability flag and lower representation of all reachable
1063 functions. In the future, lowering will introduce new functions and
1064 new entry points on the way (by template instantiation and virtual
1065 method table generation for instance). */
3d7bfc56 1066 while (cgraph_nodes_queue)
ae01b312 1067 {
0785e435 1068 struct cgraph_edge *edge;
3d7bfc56 1069 tree decl = cgraph_nodes_queue->decl;
1070
1071 node = cgraph_nodes_queue;
d87976fb 1072 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1073 node->next_needed = NULL;
ae01b312 1074
638531ad 1075 /* ??? It is possible to create extern inline function and later using
bbd5cba2 1076 weak alias attribute to kill its body. See
638531ad 1077 gcc.c-torture/compile/20011119-1.c */
91bf9d9a 1078 if (!DECL_STRUCT_FUNCTION (decl)
c70f46b0 1079 && (!node->alias || !node->thunk.alias)
91bf9d9a 1080 && !node->thunk.thunk_p)
9b8fb23a 1081 {
1082 cgraph_reset_node (node);
443089c1 1083 node->local.redefined_extern_inline = true;
9b8fb23a 1084 continue;
1085 }
638531ad 1086
7bfefa9d 1087 if (!node->analyzed)
1088 cgraph_analyze_function (node);
2c0b522d 1089
ae01b312 1090 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1091 if (!edge->callee->reachable)
2c0b522d 1092 cgraph_mark_reachable_node (edge->callee);
91bf9d9a 1093 for (edge = node->callers; edge; edge = edge->next_caller)
1094 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1095 cgraph_mark_reachable_node (edge->caller);
2c0b522d 1096
61c2c7b1 1097 if (node->same_comdat_group)
1098 {
1099 for (next = node->same_comdat_group;
1100 next != node;
1101 next = next->same_comdat_group)
1102 cgraph_mark_reachable_node (next);
1103 }
1104
d544ceff 1105 /* If decl is a clone of an abstract function, mark that abstract
1106 function so that we don't release its body. The DECL_INITIAL() of that
fd6a3c41 1107 abstract function declaration will be later needed to output debug
1108 info. */
d544ceff 1109 if (DECL_ABSTRACT_ORIGIN (decl))
1110 {
fd6a3c41 1111 struct cgraph_node *origin_node;
1112 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
d544ceff 1113 origin_node->abstract_and_needed = true;
1114 }
1115
c17d0de1 1116 /* We finalize local static variables during constructing callgraph
1117 edges. Process their attributes too. */
1118 process_function_and_variable_attributes (first_processed,
1119 first_analyzed_var);
1120 first_processed = cgraph_nodes;
1d416bd7 1121 first_analyzed_var = varpool_nodes;
1122 varpool_analyze_pending_decls ();
aeeb194b 1123 cgraph_process_new_functions ();
ae01b312 1124 }
2c0b522d 1125
aa5e06c7 1126 /* Collect entry points to the unit. */
f79b6507 1127 if (cgraph_dump_file)
3d7bfc56 1128 {
e4200070 1129 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1130 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1131 if (node->needed)
f79b6507 1132 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1133 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1134 dump_cgraph (cgraph_dump_file);
7410370b 1135 dump_varpool (cgraph_dump_file);
3d7bfc56 1136 }
e6d2b2d8 1137
f79b6507 1138 if (cgraph_dump_file)
1139 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1140
f4ec5ce1 1141 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1142 {
1143 tree decl = node->decl;
f4ec5ce1 1144 next = node->next;
ae01b312 1145
91bf9d9a 1146 if (node->local.finalized && !gimple_has_body_p (decl)
c70f46b0 1147 && (!node->alias || !node->thunk.alias)
91bf9d9a 1148 && !node->thunk.thunk_p)
a0c938f0 1149 cgraph_reset_node (node);
9b8fb23a 1150
91bf9d9a 1151 if (!node->reachable
c70f46b0 1152 && (gimple_has_body_p (decl) || node->thunk.thunk_p
1153 || (node->alias && node->thunk.alias)))
ae01b312 1154 {
f79b6507 1155 if (cgraph_dump_file)
1156 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1157 cgraph_remove_node (node);
9b8fb23a 1158 continue;
ae01b312 1159 }
bc5cab3b 1160 else
1161 node->next_needed = NULL;
91bf9d9a 1162 gcc_assert (!node->local.finalized || node->thunk.thunk_p
c70f46b0 1163 || node->alias
91bf9d9a 1164 || gimple_has_body_p (decl));
9b8fb23a 1165 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1166 }
f79b6507 1167 if (cgraph_dump_file)
e4200070 1168 {
1169 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1170 dump_cgraph (cgraph_dump_file);
7410370b 1171 dump_varpool (cgraph_dump_file);
e4200070 1172 }
f1c35659 1173 bitmap_obstack_release (NULL);
c1dcd13c 1174 first_analyzed = cgraph_nodes;
ae01b312 1175 ggc_collect ();
aeeb194b 1176}
1177
8f69fd82 1178
aeeb194b 1179/* Analyze the whole compilation unit once it is parsed completely. */
1180
1181void
1182cgraph_finalize_compilation_unit (void)
1183{
9929334e 1184 timevar_push (TV_CGRAPH);
1185
a0605d65 1186 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1187 if (flag_lto)
1188 lto_streamer_hooks_init ();
1189
bb903e9c 1190 /* If we're here there's no current function anymore. Some frontends
1191 are lazy in clearing these. */
1192 current_function_decl = NULL;
1193 set_cfun (NULL);
1194
bfec3452 1195 /* Do not skip analyzing the functions if there were errors, we
1196 miss diagnostics for following functions otherwise. */
aeeb194b 1197
8f69fd82 1198 /* Emit size functions we didn't inline. */
4189e677 1199 finalize_size_functions ();
8f69fd82 1200
9929334e 1201 /* Mark alias targets necessary and emit diagnostics. */
1202 finish_aliases_1 ();
1203
aeeb194b 1204 if (!quiet_flag)
1205 {
1206 fprintf (stderr, "\nAnalyzing compilation unit\n");
1207 fflush (stderr);
1208 }
1209
ec4791a8 1210 if (flag_dump_passes)
1211 dump_passes ();
1212
9929334e 1213 /* Gimplify and lower all functions, compute reachability and
1214 remove unreachable nodes. */
1215 cgraph_analyze_functions ();
1216
8f69fd82 1217 /* Mark alias targets necessary and emit diagnostics. */
1218 finish_aliases_1 ();
1219
9929334e 1220 /* Gimplify and lower thunks. */
aeeb194b 1221 cgraph_analyze_functions ();
bfec3452 1222
9929334e 1223 /* Finally drive the pass manager. */
bfec3452 1224 cgraph_optimize ();
9929334e 1225
1226 timevar_pop (TV_CGRAPH);
ae01b312 1227}
9ed5b1f5 1228
1229
ae01b312 1230/* Figure out what functions we want to assemble. */
1231
1232static void
d9d9733a 1233cgraph_mark_functions_to_output (void)
ae01b312 1234{
1235 struct cgraph_node *node;
61c2c7b1 1236#ifdef ENABLE_CHECKING
1237 bool check_same_comdat_groups = false;
1238
1239 for (node = cgraph_nodes; node; node = node->next)
1240 gcc_assert (!node->process);
1241#endif
ae01b312 1242
ae01b312 1243 for (node = cgraph_nodes; node; node = node->next)
1244 {
1245 tree decl = node->decl;
d7c6d889 1246 struct cgraph_edge *e;
a0c938f0 1247
61c2c7b1 1248 gcc_assert (!node->process || node->same_comdat_group);
1249 if (node->process)
1250 continue;
d7c6d889 1251
1252 for (e = node->callers; e; e = e->next_caller)
611e5405 1253 if (e->inline_failed)
d7c6d889 1254 break;
ae01b312 1255
e6d2b2d8 1256 /* We need to output all local functions that are used and not
1257 always inlined, as well as those that are reachable from
1258 outside the current compilation unit. */
1a1a827a 1259 if (node->analyzed
91bf9d9a 1260 && !node->thunk.thunk_p
c70f46b0 1261 && !node->alias
b0cdf642 1262 && !node->global.inlined_to
1e3aebec 1263 && (!cgraph_only_called_directly_p (node)
c70f46b0 1264 || ((e || ipa_ref_has_aliases_p (&node->ref_list))
1265 && node->reachable))
4ee9c684 1266 && !TREE_ASM_WRITTEN (decl)
ae01b312 1267 && !DECL_EXTERNAL (decl))
61c2c7b1 1268 {
1269 node->process = 1;
1270 if (node->same_comdat_group)
1271 {
1272 struct cgraph_node *next;
1273 for (next = node->same_comdat_group;
1274 next != node;
1275 next = next->same_comdat_group)
c70f46b0 1276 if (!next->thunk.thunk_p && !next->alias)
91bf9d9a 1277 next->process = 1;
61c2c7b1 1278 }
1279 }
1280 else if (node->same_comdat_group)
1281 {
1282#ifdef ENABLE_CHECKING
1283 check_same_comdat_groups = true;
1284#endif
1285 }
cc636d56 1286 else
9cee7c3f 1287 {
1288 /* We should've reclaimed all functions that are not needed. */
1289#ifdef ENABLE_CHECKING
75a70cf9 1290 if (!node->global.inlined_to
1a1a827a 1291 && gimple_has_body_p (decl)
08843223 1292 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1293 are inside partition, we can end up not removing the body since we no longer
1294 have analyzed node pointing to it. */
1295 && !node->in_other_partition
c70f46b0 1296 && !node->alias
9cee7c3f 1297 && !DECL_EXTERNAL (decl))
1298 {
1299 dump_cgraph_node (stderr, node);
1300 internal_error ("failed to reclaim unneeded function");
1301 }
1302#endif
75a70cf9 1303 gcc_assert (node->global.inlined_to
1a1a827a 1304 || !gimple_has_body_p (decl)
08843223 1305 || node->in_other_partition
9cee7c3f 1306 || DECL_EXTERNAL (decl));
1307
1308 }
a0c938f0 1309
961e3b13 1310 }
61c2c7b1 1311#ifdef ENABLE_CHECKING
1312 if (check_same_comdat_groups)
1313 for (node = cgraph_nodes; node; node = node->next)
1314 if (node->same_comdat_group && !node->process)
1315 {
1316 tree decl = node->decl;
1317 if (!node->global.inlined_to
1318 && gimple_has_body_p (decl)
08843223 1319 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1320 are inside partition, we can end up not removing the body since we no longer
1321 have analyzed node pointing to it. */
1322 && !node->in_other_partition
61c2c7b1 1323 && !DECL_EXTERNAL (decl))
1324 {
1325 dump_cgraph_node (stderr, node);
c70f46b0 1326 internal_error ("failed to reclaim unneeded functionin same comdat group");
61c2c7b1 1327 }
1328 }
1329#endif
961e3b13 1330}
1331
28454517 1332/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1333 in lowered gimple form.
1334
1335 Set current_function_decl and cfun to newly constructed empty function body.
1336 return basic block in the function body. */
1337
1338static basic_block
1339init_lowered_empty_function (tree decl)
1340{
1341 basic_block bb;
1342
1343 current_function_decl = decl;
1344 allocate_struct_function (decl, false);
1345 gimple_register_cfg_hooks ();
1346 init_empty_tree_cfg ();
1347 init_tree_ssa (cfun);
1348 init_ssa_operands ();
1349 cfun->gimple_df->in_ssa_p = true;
1350 DECL_INITIAL (decl) = make_node (BLOCK);
1351
1352 DECL_SAVED_TREE (decl) = error_mark_node;
1353 cfun->curr_properties |=
1354 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1355 PROP_ssa);
1356
1357 /* Create BB for body of the function and connect it properly. */
1358 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1359 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1360 make_edge (bb, EXIT_BLOCK_PTR, 0);
1361
1362 return bb;
1363}
1364
1365/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1366 offset indicated by VIRTUAL_OFFSET, if that is
1367 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1368 zero for a result adjusting thunk. */
1369
1370static tree
1371thunk_adjust (gimple_stmt_iterator * bsi,
1372 tree ptr, bool this_adjusting,
1373 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1374{
1375 gimple stmt;
1376 tree ret;
1377
55d6cb23 1378 if (this_adjusting
1379 && fixed_offset != 0)
28454517 1380 {
1381 stmt = gimple_build_assign (ptr,
1382 fold_build2_loc (input_location,
1383 POINTER_PLUS_EXPR,
1384 TREE_TYPE (ptr), ptr,
1385 size_int (fixed_offset)));
1386 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1387 }
1388
1389 /* If there's a virtual offset, look up that value in the vtable and
1390 adjust the pointer again. */
1391 if (virtual_offset)
1392 {
1393 tree vtabletmp;
1394 tree vtabletmp2;
1395 tree vtabletmp3;
1396 tree offsettmp;
1397
1398 if (!vtable_entry_type)
1399 {
1400 tree vfunc_type = make_node (FUNCTION_TYPE);
1401 TREE_TYPE (vfunc_type) = integer_type_node;
1402 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1403 layout_type (vfunc_type);
1404
1405 vtable_entry_type = build_pointer_type (vfunc_type);
1406 }
1407
1408 vtabletmp =
1409 create_tmp_var (build_pointer_type
1410 (build_pointer_type (vtable_entry_type)), "vptr");
1411
1412 /* The vptr is always at offset zero in the object. */
1413 stmt = gimple_build_assign (vtabletmp,
1414 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1415 ptr));
1416 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1417 mark_symbols_for_renaming (stmt);
1418 find_referenced_vars_in (stmt);
1419
1420 /* Form the vtable address. */
1421 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1422 "vtableaddr");
1423 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1424 build_simple_mem_ref (vtabletmp));
28454517 1425 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1426 mark_symbols_for_renaming (stmt);
1427 find_referenced_vars_in (stmt);
1428
1429 /* Find the entry with the vcall offset. */
1430 stmt = gimple_build_assign (vtabletmp2,
1431 fold_build2_loc (input_location,
1432 POINTER_PLUS_EXPR,
1433 TREE_TYPE (vtabletmp2),
1434 vtabletmp2,
1435 fold_convert (sizetype,
1436 virtual_offset)));
1437 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1438
1439 /* Get the offset itself. */
1440 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1441 "vcalloffset");
1442 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1443 build_simple_mem_ref (vtabletmp2));
28454517 1444 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1445 mark_symbols_for_renaming (stmt);
1446 find_referenced_vars_in (stmt);
1447
1448 /* Cast to sizetype. */
1449 offsettmp = create_tmp_var (sizetype, "offset");
1450 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1451 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1452 mark_symbols_for_renaming (stmt);
1453 find_referenced_vars_in (stmt);
1454
1455 /* Adjust the `this' pointer. */
1456 ptr = fold_build2_loc (input_location,
1457 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1458 offsettmp);
1459 }
1460
55d6cb23 1461 if (!this_adjusting
1462 && fixed_offset != 0)
28454517 1463 /* Adjust the pointer by the constant. */
1464 {
1465 tree ptrtmp;
1466
1467 if (TREE_CODE (ptr) == VAR_DECL)
1468 ptrtmp = ptr;
1469 else
1470 {
1471 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1472 stmt = gimple_build_assign (ptrtmp, ptr);
1473 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1474 mark_symbols_for_renaming (stmt);
1475 find_referenced_vars_in (stmt);
1476 }
1477 ptr = fold_build2_loc (input_location,
1478 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1479 size_int (fixed_offset));
1480 }
1481
1482 /* Emit the statement and gimplify the adjustment expression. */
1483 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1484 stmt = gimple_build_assign (ret, ptr);
1485 mark_symbols_for_renaming (stmt);
1486 find_referenced_vars_in (stmt);
1487 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1488
1489 return ret;
1490}
1491
1492/* Produce assembler for thunk NODE. */
1493
1494static void
1495assemble_thunk (struct cgraph_node *node)
1496{
1497 bool this_adjusting = node->thunk.this_adjusting;
1498 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1499 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1500 tree virtual_offset = NULL;
1501 tree alias = node->thunk.alias;
1502 tree thunk_fndecl = node->decl;
1503 tree a = DECL_ARGUMENTS (thunk_fndecl);
1504
1505 current_function_decl = thunk_fndecl;
1506
aed6e608 1507 /* Ensure thunks are emitted in their correct sections. */
1508 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1509
28454517 1510 if (this_adjusting
1511 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1512 virtual_value, alias))
1513 {
1514 const char *fnname;
1515 tree fn_block;
1516
1517 DECL_RESULT (thunk_fndecl)
1518 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1519 RESULT_DECL, 0, integer_type_node);
22ea3b47 1520 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1521
1522 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1523 create one. */
1524 fn_block = make_node (BLOCK);
1525 BLOCK_VARS (fn_block) = a;
1526 DECL_INITIAL (thunk_fndecl) = fn_block;
1527 init_function_start (thunk_fndecl);
1528 cfun->is_thunk = 1;
1529 assemble_start_function (thunk_fndecl, fnname);
1530
1531 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1532 fixed_offset, virtual_value, alias);
1533
1534 assemble_end_function (thunk_fndecl, fnname);
1535 init_insn_lengths ();
1536 free_after_compilation (cfun);
1537 set_cfun (NULL);
1538 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
91bf9d9a 1539 node->thunk.thunk_p = false;
1540 node->analyzed = false;
28454517 1541 }
1542 else
1543 {
1544 tree restype;
1545 basic_block bb, then_bb, else_bb, return_bb;
1546 gimple_stmt_iterator bsi;
1547 int nargs = 0;
1548 tree arg;
1549 int i;
1550 tree resdecl;
1551 tree restmp = NULL;
1552 VEC(tree, heap) *vargs;
1553
1554 gimple call;
1555 gimple ret;
1556
1557 DECL_IGNORED_P (thunk_fndecl) = 1;
1558 bitmap_obstack_initialize (NULL);
1559
1560 if (node->thunk.virtual_offset_p)
1561 virtual_offset = size_int (virtual_value);
1562
1563 /* Build the return declaration for the function. */
1564 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1565 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1566 {
1567 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1568 DECL_ARTIFICIAL (resdecl) = 1;
1569 DECL_IGNORED_P (resdecl) = 1;
1570 DECL_RESULT (thunk_fndecl) = resdecl;
1571 }
1572 else
1573 resdecl = DECL_RESULT (thunk_fndecl);
1574
1575 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1576
1577 bsi = gsi_start_bb (bb);
1578
1579 /* Build call to the function being thunked. */
1580 if (!VOID_TYPE_P (restype))
1581 {
1582 if (!is_gimple_reg_type (restype))
1583 {
1584 restmp = resdecl;
2ab2ce89 1585 add_local_decl (cfun, restmp);
28454517 1586 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1587 }
1588 else
1589 restmp = create_tmp_var_raw (restype, "retval");
1590 }
1591
1767a056 1592 for (arg = a; arg; arg = DECL_CHAIN (arg))
28454517 1593 nargs++;
1594 vargs = VEC_alloc (tree, heap, nargs);
1595 if (this_adjusting)
1596 VEC_quick_push (tree, vargs,
1597 thunk_adjust (&bsi,
1598 a, 1, fixed_offset,
1599 virtual_offset));
1600 else
1601 VEC_quick_push (tree, vargs, a);
1767a056 1602 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
28454517 1603 VEC_quick_push (tree, vargs, arg);
1604 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1605 VEC_free (tree, heap, vargs);
1606 gimple_call_set_cannot_inline (call, true);
1607 gimple_call_set_from_thunk (call, true);
1608 if (restmp)
1609 gimple_call_set_lhs (call, restmp);
1610 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1611 mark_symbols_for_renaming (call);
1612 find_referenced_vars_in (call);
1613 update_stmt (call);
1614
1615 if (restmp && !this_adjusting)
1616 {
57ab8ec3 1617 tree true_label = NULL_TREE;
28454517 1618
1619 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1620 {
1621 gimple stmt;
1622 /* If the return type is a pointer, we need to
1623 protect against NULL. We know there will be an
1624 adjustment, because that's why we're emitting a
1625 thunk. */
1626 then_bb = create_basic_block (NULL, (void *) 0, bb);
1627 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1628 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1629 remove_edge (single_succ_edge (bb));
1630 true_label = gimple_block_label (then_bb);
28454517 1631 stmt = gimple_build_cond (NE_EXPR, restmp,
385f3f36 1632 build_zero_cst (TREE_TYPE (restmp)),
28454517 1633 NULL_TREE, NULL_TREE);
1634 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1635 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1636 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1637 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1638 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1639 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1640 bsi = gsi_last_bb (then_bb);
1641 }
1642
1643 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1644 fixed_offset, virtual_offset);
1645 if (true_label)
1646 {
1647 gimple stmt;
1648 bsi = gsi_last_bb (else_bb);
385f3f36 1649 stmt = gimple_build_assign (restmp,
1650 build_zero_cst (TREE_TYPE (restmp)));
28454517 1651 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1652 bsi = gsi_last_bb (return_bb);
1653 }
1654 }
1655 else
1656 gimple_call_set_tail (call, true);
1657
1658 /* Build return value. */
1659 ret = gimple_build_return (restmp);
1660 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1661
1662 delete_unreachable_blocks ();
1663 update_ssa (TODO_update_ssa);
1664
28454517 1665 /* Since we want to emit the thunk, we explicitly mark its name as
1666 referenced. */
91bf9d9a 1667 node->thunk.thunk_p = false;
1668 cgraph_node_remove_callees (node);
28454517 1669 cgraph_add_new_function (thunk_fndecl, true);
1670 bitmap_obstack_release (NULL);
1671 }
1672 current_function_decl = NULL;
1673}
1674
91bf9d9a 1675
c70f46b0 1676
1677/* Assemble thunks and aliases asociated to NODE. */
91bf9d9a 1678
1679static void
c70f46b0 1680assemble_thunks_and_aliases (struct cgraph_node *node)
91bf9d9a 1681{
1682 struct cgraph_edge *e;
c70f46b0 1683 int i;
1684 struct ipa_ref *ref;
1685
91bf9d9a 1686 for (e = node->callers; e;)
1687 if (e->caller->thunk.thunk_p)
1688 {
1689 struct cgraph_node *thunk = e->caller;
1690
1691 e = e->next_caller;
c70f46b0 1692 assemble_thunks_and_aliases (thunk);
91bf9d9a 1693 assemble_thunk (thunk);
1694 }
1695 else
1696 e = e->next_caller;
c70f46b0 1697 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1698 if (ref->use == IPA_REF_ALIAS)
1699 {
1700 struct cgraph_node *alias = ipa_ref_refering_node (ref);
1701 assemble_alias (alias->decl,
1702 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1703 assemble_thunks_and_aliases (alias);
1704 }
91bf9d9a 1705}
1706
ae01b312 1707/* Expand function specified by NODE. */
e6d2b2d8 1708
ae01b312 1709static void
d9d9733a 1710cgraph_expand_function (struct cgraph_node *node)
ae01b312 1711{
1712 tree decl = node->decl;
1713
b0cdf642 1714 /* We ought to not compile any inline clones. */
cc636d56 1715 gcc_assert (!node->global.inlined_to);
b0cdf642 1716
6329636b 1717 announce_function (decl);
09fc9532 1718 node->process = 0;
c70f46b0 1719 assemble_thunks_and_aliases (node);
f7777314 1720 gcc_assert (node->lowered);
1721
1722 /* Generate RTL for the body of DECL. */
1723 tree_rest_of_compilation (decl);
1724
1725 /* Make sure that BE didn't give up on compiling. */
1726 gcc_assert (TREE_ASM_WRITTEN (decl));
1727 current_function_decl = NULL;
cc91b414 1728 gcc_assert (!cgraph_preserve_function_body_p (node));
1a1a827a 1729 cgraph_release_function_body (node);
1730 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1731 points to the dead function body. */
1732 cgraph_node_remove_callees (node);
e1be32b8 1733
1734 cgraph_function_flags_ready = true;
ae01b312 1735}
1736
b0cdf642 1737/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1738
1739bool
326a9581 1740cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1741{
b0cdf642 1742 *reason = e->inline_failed;
1743 return !e->inline_failed;
d7c6d889 1744}
b0cdf642 1745
acc70efa 1746
acc70efa 1747
d9d9733a 1748/* Expand all functions that must be output.
1749
d7c6d889 1750 Attempt to topologically sort the nodes so function is output when
1751 all called functions are already assembled to allow data to be
91c82c20 1752 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1753 between a function and its callees (later we may choose to use a more
d7c6d889 1754 sophisticated algorithm for function reordering; we will likely want
1755 to use subsections to make the output functions appear in top-down
1756 order). */
1757
1758static void
a6868229 1759cgraph_expand_all_functions (void)
d7c6d889 1760{
1761 struct cgraph_node *node;
4c36ffe6 1762 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1763 int order_pos, new_order_pos = 0;
d7c6d889 1764 int i;
1765
7771d558 1766 order_pos = ipa_reverse_postorder (order);
cc636d56 1767 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1768
7bd28bba 1769 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1770 optimization. So we must be sure to not reference them. */
1771 for (i = 0; i < order_pos; i++)
09fc9532 1772 if (order[i]->process)
b0cdf642 1773 order[new_order_pos++] = order[i];
1774
1775 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1776 {
1777 node = order[i];
09fc9532 1778 if (node->process)
d7c6d889 1779 {
cc636d56 1780 gcc_assert (node->reachable);
09fc9532 1781 node->process = 0;
d7c6d889 1782 cgraph_expand_function (node);
1783 }
1784 }
523c1122 1785 cgraph_process_new_functions ();
773c5ba7 1786
d7c6d889 1787 free (order);
773c5ba7 1788
d7c6d889 1789}
1790
56af936e 1791/* This is used to sort the node types by the cgraph order number. */
1792
0b09525f 1793enum cgraph_order_sort_kind
1794{
1795 ORDER_UNDEFINED = 0,
1796 ORDER_FUNCTION,
1797 ORDER_VAR,
1798 ORDER_ASM
1799};
1800
56af936e 1801struct cgraph_order_sort
1802{
0b09525f 1803 enum cgraph_order_sort_kind kind;
56af936e 1804 union
1805 {
1806 struct cgraph_node *f;
1d416bd7 1807 struct varpool_node *v;
56af936e 1808 struct cgraph_asm_node *a;
1809 } u;
1810};
1811
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Every function, variable and asm node carries a unique order number
     smaller than cgraph_order, so an array indexed by order collects all
     of them without collision (the asserts below check this).  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Slot in the functions selected for output; thunks and aliases are
     emitted through other means.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In no-toplevel-reorder mode we output all statics; mark them as
     needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
	{
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Settle named-section flags for all variables before any of them is
     assembled.  */
  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_finalize_named_section_flags (nodes[i].u.v);

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1902
b0cdf642 1903/* Return true when function body of DECL still needs to be kept around
1904 for later re-use. */
1905bool
cc91b414 1906cgraph_preserve_function_body_p (struct cgraph_node *node)
b0cdf642 1907{
8d8c4c8d 1908 gcc_assert (cgraph_global_info_ready);
c70f46b0 1909 gcc_assert (!node->alias && !node->thunk.thunk_p);
cc91b414 1910
b0cdf642 1911 /* Look if there is any clone around. */
ccf4ab6b 1912 if (node->clones)
1913 return true;
b0cdf642 1914 return false;
1915}
1916
/* Run the inter-procedural analysis (IPA) pass pipeline: small IPA
   passes, summary generation for the regular IPA passes, optional LTO
   stream-out, and finally the regular IPA passes themselves.  */

static void
ipa_passes (void)
{
  /* IPA passes work on the whole unit; make sure no function is set up
     as the current one.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* When reading from LTO streams the small IPA passes already ran in
     the compile stage.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In the ltrans stage the regular IPA passes are not re-executed
     (their decisions were streamed in).  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1971
34e5cced 1972
/* Perform simple optimizations based on callgraph.  This is the main
   driver of the IPA and expansion phases: it runs the IPA passes,
   removes unreachable nodes, materializes clones and finally expands
   all functions and variables to assembly.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Turn virtual clones into real bodies and decide which functions
     must go to the assembly file.  */
  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder requires emitting everything in original
     source order; otherwise expand in whatever order is convenient.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
34e5cced 2081
121f3051 2082void
2083init_cgraph (void)
2084{
01ec0a6c 2085 if (!cgraph_dump_file)
2086 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 2087}
b5d36404 2088
a0c938f0 2089/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2090 fixed by cgraph_function_versioning (), now the call_expr in their
2091 respective tree code should be updated to call the NEW_VERSION. */
2092
2093static void
2094update_call_expr (struct cgraph_node *new_version)
2095{
2096 struct cgraph_edge *e;
2097
2098 gcc_assert (new_version);
75a70cf9 2099
2100 /* Update the call expr on the edges to call the new version. */
b5d36404 2101 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2102 {
2103 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2104 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2105 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2106 }
b5d36404 2107}
2108
2109
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL BLOCK_TO_COPY determine what basic blocks
   was copied to prevent duplications of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  /* The copy starts out analyzed and unit-local, inheriting the old
     node's flags, rtl info and profile count.  */
  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->local.externally_visible = false;
  new_version->local.local = true;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone all direct and indirect callee edges, skipping calls that
     sit in basic blocks not copied into the clone — those calls would
     be dead there.  */
  for (e = old_version->callees; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  for (e = old_version->indirect_calls; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}
2167
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.

   Returns NULL when the function cannot be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Dropping arguments is only valid when the signature may change.  */
  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node.
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Let IPA infrastructure know a new function has appeared.  */
  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
469679ab 2242
/* Given virtual clone, turn it into actual clone.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Record the decl this node was cloned from; when the origin is itself
     a materialized clone, chain back to its former origin.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  Unlink it from the sibling list of
     its clone-of origin.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin has no body of its own and no clones left, its body,
     callee edges and references are no longer needed.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2279
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Returns
   the (possibly replaced) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  bool gsi_computed = false;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or calls that already target the
     callee's decl.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  /* A nonzero thunk delta means the first (this) argument must be
     adjusted before the call — unless that argument is itself being
     removed via combined_args_to_skip bit 0.  */
  if (e->indirect_info &&
      e->indirect_info->thunk_delta != 0
      && (!e->callee->clone.combined_args_to_skip
	  || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
    {
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, " Thunk delta is "
		 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
      gsi = gsi_for_stmt (e->call_stmt);
      gsi_computed = true;
      gimple_adjust_this_by_delta (&gsi,
				   build_int_cst (sizetype,
						  e->indirect_info->thunk_delta));
      /* The delta has been folded into the statement; clear it so it is
	 not applied twice.  */
      e->indirect_info->thunk_delta = 0;
    }

  /* When the callee dropped some parameters, the old call statement
     must be replaced by a copy without the skipped arguments.  */
  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      if (!gsi_computed)
	gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, " updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2381
ccf4ab6b 2382/* Once all functions from compilation unit are in memory, produce all clones
c596d830 2383 and update all calls. We might also do this on demand if we don't want to
2384 bring all functions to memory prior compilation, but current WHOPR
2385 implementation does that and it is is bit easier to keep everything right in
2386 this order. */
ccf4ab6b 2387void
2388cgraph_materialize_all_clones (void)
2389{
2390 struct cgraph_node *node;
2391 bool stabilized = false;
2392
2393 if (cgraph_dump_file)
2394 fprintf (cgraph_dump_file, "Materializing clones\n");
2395#ifdef ENABLE_CHECKING
2396 verify_cgraph ();
2397#endif
2398
2399 /* We can also do topological order, but number of iterations should be
2400 bounded by number of IPA passes since single IPA pass is probably not
2401 going to create clones of clones it created itself. */
2402 while (!stabilized)
2403 {
2404 stabilized = true;
2405 for (node = cgraph_nodes; node; node = node->next)
2406 {
2407 if (node->clone_of && node->decl != node->clone_of->decl
2408 && !gimple_has_body_p (node->decl))
2409 {
2410 if (gimple_has_body_p (node->clone_of->decl))
2411 {
2412 if (cgraph_dump_file)
e20422ea 2413 {
0a10fd82 2414 fprintf (cgraph_dump_file, "cloning %s to %s\n",
e20422ea 2415 cgraph_node_name (node->clone_of),
2416 cgraph_node_name (node));
2417 if (node->clone.tree_map)
2418 {
2419 unsigned int i;
2420 fprintf (cgraph_dump_file, " replace map: ");
2421 for (i = 0; i < VEC_length (ipa_replace_map_p,
2422 node->clone.tree_map);
2423 i++)
2424 {
2425 struct ipa_replace_map *replace_info;
2426 replace_info = VEC_index (ipa_replace_map_p,
2427 node->clone.tree_map,
2428 i);
2429 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2430 fprintf (cgraph_dump_file, " -> ");
2431 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2432 fprintf (cgraph_dump_file, "%s%s;",
2433 replace_info->replace_p ? "(replace)":"",
2434 replace_info->ref_p ? "(ref)":"");
2435 }
2436 fprintf (cgraph_dump_file, "\n");
2437 }
2438 if (node->clone.args_to_skip)
2439 {
2440 fprintf (cgraph_dump_file, " args_to_skip: ");
2441 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2442 }
2443 if (node->clone.args_to_skip)
2444 {
2445 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2446 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2447 }
2448 }
ccf4ab6b 2449 cgraph_materialize_clone (node);
a510bd8d 2450 stabilized = false;
ccf4ab6b 2451 }
ccf4ab6b 2452 }
2453 }
2454 }
ee3f5fc0 2455 for (node = cgraph_nodes; node; node = node->next)
2456 if (!node->analyzed && node->callees)
2457 cgraph_node_remove_callees (node);
c596d830 2458 if (cgraph_dump_file)
2459 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2460#ifdef ENABLE_CHECKING
2461 verify_cgraph ();
2462#endif
ccf4ab6b 2463 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2464}
2465
a861fe52 2466#include "gt-cgraphunit.h"