gcc/cgraphunit.c
cd6bca02 1/* Callgraph based interprocedural optimizations.
aed6e608 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well as
cd6bca02 23 a few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface between
26 tree based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole function body
33 and it is certain that neither the body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing the GCC extern inline
36 function extension.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is finalized
46 and will no longer change.
b0cdf642 47
851d9296 48 The call-graph construction and local function analysis take
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
b0cdf642 51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function; all references
68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ front end to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? With tree-ssa, genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
79
6329636b 80 Analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed are output to the assembly file. Via the
88 mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable and thus
94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
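
/* Illustrative sketch of how a front end is assumed to drive the interface
   described above; frontend_more_input and frontend_parse_next_function are
   hypothetical placeholders for the front end's own parsing loop.

     while (frontend_more_input ())
       {
         tree fndecl = frontend_parse_next_function ();
         cgraph_finalize_function (fndecl, false);
       }
     cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit then analyzes the reachable functions and
   finally calls cgraph_optimize, which performs the interprocedural
   optimizations and expands the surviving functions.  */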
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
a41f2a28 141#include "ipa-inline.h"
7771d558 142#include "ipa-utils.h"
a0605d65 143#include "lto-streamer.h"
d7c6d889 144
a6868229 145static void cgraph_expand_all_functions (void);
d9d9733a 146static void cgraph_mark_functions_to_output (void);
147static void cgraph_expand_function (struct cgraph_node *);
f788fff2 148static void cgraph_output_pending_asms (void);
25bb88de 149
ecb08119 150FILE *cgraph_dump_file;
121f3051 151
28454517 152/* Used for vtable lookup in thunk adjusting. */
153static GTY (()) tree vtable_entry_type;
154
2c0b522d 155/* Determine if function DECL is needed. That is, visible to something
156 either outside this translation unit or something magic in the system
6329636b 157 configury. */
2c0b522d 158
7bfefa9d 159bool
160cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 161{
3f82b628 162 /* If the user told us it is used, then it must be so. */
05806473 163 if (node->local.externally_visible)
164 return true;
165
3f82b628 166 /* ??? If the assembler name is set by hand, it is possible to assemble
167 the name later after finalizing the function and the fact is noticed
168 in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
c70f46b0 170 && (!node->thunk.thunk_p && !node->same_body_alias)
3f82b628 171 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
172 return true;
173
55680bef 174 /* With -fkeep-inline-functions we are keeping all inline functions except
175 for extern inline ones. */
176 if (flag_keep_inline_functions
177 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 178 && !DECL_EXTERNAL (decl)
cbd7f5a0 179 && !DECL_DISREGARD_INLINE_LIMITS (decl))
55680bef 180 return true;
181
2c0b522d 182 /* If we decided it was needed before, but at the time we didn't have
183 the body of the function available, then it's still needed. We have
184 to go back and re-check its dependencies now. */
185 if (node->needed)
186 return true;
187
188 /* Externally visible functions must be output. The exception is
a0c938f0 189 COMDAT functions that must be output only when they are needed.
8baa9d15 190
191 When not optimizing, also output the static functions (see
95da6220 192 PR24561), but don't do so for always_inline functions, functions
0f9238c0 193 declared inline, and nested functions. These were optimized out
d3d410e1 194 in the original implementation and it is unclear whether we want
554f2707 195 to change the behavior here. */
bba7ddf8 196 if (((TREE_PUBLIC (decl)
0f9238c0 197 || (!optimize
cbd7f5a0 198 && !DECL_DISREGARD_INLINE_LIMITS (decl)
d3d410e1 199 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 200 && !(DECL_CONTEXT (decl)
201 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 202 && !flag_whole_program
cbcf2791 203 && !flag_lto)
62eec3b4 204 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 205 return true;
206
2c0b522d 207 return false;
208}
209
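/* For instance (illustrative only, default options with neither
   -fwhole-program nor -flto assumed): given

     int visible_entry (void) { return 1; }
     static int helper (void) { return 2; }

   visible_entry is needed because it is TREE_PUBLIC and neither COMDAT nor
   external, while at -O2 helper is not needed by itself and is kept only if
   something reachable references it.  */
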
bdc40eb8 210/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
523c1122 211 functions into the callgraph in a way that makes them look like ordinary
212 reachable functions inserted into the callgraph already at construction time. */
213
214bool
215cgraph_process_new_functions (void)
216{
217 bool output = false;
218 tree fndecl;
219 struct cgraph_node *node;
220
0cddb138 221 varpool_analyze_pending_decls ();
523c1122 222 /* Note that this queue may grow as it is being processed, as the new
223 functions may generate new ones. */
224 while (cgraph_new_nodes)
225 {
226 node = cgraph_new_nodes;
227 fndecl = node->decl;
228 cgraph_new_nodes = cgraph_new_nodes->next_needed;
229 switch (cgraph_state)
230 {
231 case CGRAPH_STATE_CONSTRUCTION:
232 /* At construction time we just need to finalize function and move
233 it into reachable functions list. */
234
235 node->next_needed = NULL;
236 cgraph_finalize_function (fndecl, false);
237 cgraph_mark_reachable_node (node);
238 output = true;
4f7a1122 239 cgraph_call_function_insertion_hooks (node);
523c1122 240 break;
241
242 case CGRAPH_STATE_IPA:
f517b36e 243 case CGRAPH_STATE_IPA_SSA:
523c1122 244 /* When IPA optimization has already started, do all essential
245 transformations that have already been performed on the whole
246 cgraph but not on this function. */
247
75a70cf9 248 gimple_register_cfg_hooks ();
523c1122 249 if (!node->analyzed)
250 cgraph_analyze_function (node);
251 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
252 current_function_decl = fndecl;
f517b36e 253 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
254 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
255 /* When not optimizing, be sure we run early local passes anyway
256 to expand OMP. */
257 || !optimize)
20099e35 258 execute_pass_list (pass_early_local_passes.pass.sub);
649597af 259 else
a41f2a28 260 compute_inline_parameters (node, true);
523c1122 261 free_dominance_info (CDI_POST_DOMINATORS);
262 free_dominance_info (CDI_DOMINATORS);
263 pop_cfun ();
264 current_function_decl = NULL;
4f7a1122 265 cgraph_call_function_insertion_hooks (node);
523c1122 266 break;
267
268 case CGRAPH_STATE_EXPANSION:
269 /* Functions created during expansion shall be compiled
270 directly. */
09fc9532 271 node->process = 0;
4f7a1122 272 cgraph_call_function_insertion_hooks (node);
523c1122 273 cgraph_expand_function (node);
274 break;
275
276 default:
277 gcc_unreachable ();
278 break;
279 }
0cddb138 280 varpool_analyze_pending_decls ();
523c1122 281 }
282 return output;
283}
284
9b8fb23a 285/* As a GCC extension we allow redefinition of the function. The
286 semantics when the two bodies differ are not well defined.
287 We replace the old body with the new body, so in unit-at-a-time mode
288 we always use the new body, while in normal mode we may end up with the
289 old body inlined into some functions and the new body expanded and
290 inlined in others.
291
292 ??? It may make more sense to use one body for inlining and the other
293 body for expanding the function but this is difficult to do. */
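/* For example (illustrative only), the extension covers code such as

     extern inline int twice (int x) { return x + x; }
     int twice (int x) { return 2 * x; }

   where the second definition replaces the first body as described above.  */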
294
295static void
296cgraph_reset_node (struct cgraph_node *node)
297{
09fc9532 298 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 299 This is *not* testing for whether we've already emitted the function.
300 That case can be sort-of legitimately seen with real function redefinition
301 errors. I would argue that the front end should never present us with
302 such a case, but don't enforce that for now. */
09fc9532 303 gcc_assert (!node->process);
9b8fb23a 304
305 /* Reset our data structures so we can analyze the function again. */
306 memset (&node->local, 0, sizeof (node->local));
307 memset (&node->global, 0, sizeof (node->global));
308 memset (&node->rtl, 0, sizeof (node->rtl));
309 node->analyzed = false;
9b8fb23a 310 node->local.finalized = false;
311
9b8fb23a 312 cgraph_node_remove_callees (node);
9b8fb23a 313}
c08871a9 314
1e8e9920 315static void
316cgraph_lower_function (struct cgraph_node *node)
317{
318 if (node->lowered)
319 return;
bfec3452 320
321 if (node->nested)
322 lower_nested_functions (node->decl);
323 gcc_assert (!node->nested);
324
1e8e9920 325 tree_lowering_passes (node->decl);
326 node->lowered = true;
327}
328
28df663b 329/* DECL has been parsed. Take it, queue it, compile it at the whim of the
330 logic in effect. If NESTED is true, then our caller cannot stand to have
331 the garbage collector run at the moment. We would need to either create
332 a new GC context, or just not compile right now. */
ae01b312 333
334void
28df663b 335cgraph_finalize_function (tree decl, bool nested)
ae01b312 336{
5a90471f 337 struct cgraph_node *node = cgraph_get_create_node (decl);
ae01b312 338
c08871a9 339 if (node->local.finalized)
443089c1 340 {
341 cgraph_reset_node (node);
342 node->local.redefined_extern_inline = true;
343 }
28df663b 344
c08871a9 345 notice_global_symbol (decl);
79bb87b4 346 node->local.finalized = true;
e27482aa 347 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
ae01b312 348
7bfefa9d 349 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 350 cgraph_mark_needed_node (node);
351
ecda6e51 352 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 353 level unit, we need to be conservative about possible entry points
354 there. */
1e3aebec 355 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
356 || DECL_STATIC_CONSTRUCTOR (decl)
d050bafd 357 || DECL_STATIC_DESTRUCTOR (decl)
358 /* COMDAT virtual functions may be referenced by vtable from
0a10fd82 359 other compilation unit. Still we want to devirtualize calls
d050bafd 360 to those so we need to analyze them.
361 FIXME: We should introduce may edges for this purpose and update
362 their handling in unreachable function removal and inliner too. */
91bf9d9a 363 || (DECL_VIRTUAL_P (decl)
364 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
3f82b628 365 cgraph_mark_reachable_node (node);
366
2c0b522d 367 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 368 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 369 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 370
b69eb0ff 371 /* Possibly warn about unused parameters. */
372 if (warn_unused_parameter)
373 do_warn_unused_parameter (decl);
6329636b 374
375 if (!nested)
376 ggc_collect ();
ae01b312 377}
378
0da03d11 379/* The C99 extern inline keywords allow changing the declaration after the
380 function has been finalized. We need to re-decide whether to mark the
381 function as needed then. */
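/* Illustrative example (C99/gnu99 inline semantics assumed):

     inline int half (int x) { return x / 2; }
     extern inline int half (int x);

   The first declaration alone provides only an inline definition, but the
   later extern declaration requires an out-of-line body, so the already
   finalized function has to be re-checked and marked as needed.  */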
382
383void
384cgraph_mark_if_needed (tree decl)
385{
fd6a3c41 386 struct cgraph_node *node = cgraph_get_node (decl);
7bfefa9d 387 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 388 cgraph_mark_needed_node (node);
389}
390
ccf4ab6b 391/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
392static bool
393clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
394{
c70f46b0 395 node = cgraph_function_or_thunk_node (node, NULL);
396 node2 = cgraph_function_or_thunk_node (node2, NULL);
ccf4ab6b 397 while (node != node2 && node2)
398 node2 = node2->clone_of;
399 return node2 != NULL;
400}
401
1a036a3b 402/* Verify edge E count and frequency. */
403
404static bool
405verify_edge_count_and_frequency (struct cgraph_edge *e)
406{
407 bool error_found = false;
408 if (e->count < 0)
409 {
410 error ("caller edge count is negative");
411 error_found = true;
412 }
413 if (e->frequency < 0)
414 {
415 error ("caller edge frequency is negative");
416 error_found = true;
417 }
418 if (e->frequency > CGRAPH_FREQ_MAX)
419 {
420 error ("caller edge frequency is too large");
421 error_found = true;
422 }
423 if (gimple_has_body_p (e->caller->decl)
424 && !e->caller->global.inlined_to
8bae3ea4 425 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
426 Remove this once edges are actually removed from the function at that time. */
427 && (e->frequency
428 || (inline_edge_summary_vec
429 && !inline_edge_summary (e)->predicate))
1a036a3b 430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
0a10fd82 434 error ("caller edge frequency %i does not match BB frequency %i",
1a036a3b 435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441}
442
7b29dd2f 443/* Switch to THIS_CFUN if needed and print STMT to stderr. */
444static void
445cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
446{
447 /* debug_gimple_stmt needs correct cfun */
448 if (cfun != this_cfun)
449 set_cfun (this_cfun);
450 debug_gimple_stmt (stmt);
451}
452
2f9d66d3 453/* Verify that call graph edge E corresponds to DECL from the associated
454 statement. Return true if the verification should fail. */
455
456static bool
457verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
458{
459 if (!e->callee->global.inlined_to
460 && decl
461 && cgraph_get_node (decl)
462 && (e->callee->former_clone_of
463 != cgraph_function_or_thunk_node (cgraph_get_node (decl), NULL)->decl)
464 /* IPA-CP sometimes redirects an edge to a clone and then back to the former
465 function. This ping-pong has to go, eventually. */
466 && (cgraph_function_or_thunk_node (cgraph_get_node (decl), NULL)
467 != cgraph_function_or_thunk_node (e->callee, NULL))
468 && !clone_of_p (cgraph_get_node (decl),
469 e->callee))
470 return true;
471 else
472 return false;
473}
474
b0cdf642 475/* Verify consistency of the given cgraph node. */
4b987fac 476DEBUG_FUNCTION void
b0cdf642 477verify_cgraph_node (struct cgraph_node *node)
478{
479 struct cgraph_edge *e;
e27482aa 480 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
481 basic_block this_block;
75a70cf9 482 gimple_stmt_iterator gsi;
9bfec7c2 483 bool error_found = false;
b0cdf642 484
852f689e 485 if (seen_error ())
bd09cd3e 486 return;
487
b0cdf642 488 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 489 for (e = node->callees; e; e = e->next_callee)
490 if (e->aux)
491 {
0a81f5a0 492 error ("aux field set for edge %s->%s",
abd3e6b5 493 identifier_to_locale (cgraph_node_name (e->caller)),
494 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 495 error_found = true;
496 }
a2cb9b3b 497 if (node->count < 0)
498 {
bf776685 499 error ("execution count is negative");
a2cb9b3b 500 error_found = true;
501 }
59dd4830 502 if (node->global.inlined_to && node->local.externally_visible)
503 {
bf776685 504 error ("externally visible inline clone");
59dd4830 505 error_found = true;
506 }
507 if (node->global.inlined_to && node->address_taken)
508 {
bf776685 509 error ("inline clone with address taken");
59dd4830 510 error_found = true;
511 }
512 if (node->global.inlined_to && node->needed)
513 {
bf776685 514 error ("inline clone is needed");
59dd4830 515 error_found = true;
516 }
799c8711 517 for (e = node->indirect_calls; e; e = e->next_callee)
518 {
519 if (e->aux)
520 {
521 error ("aux field set for indirect edge from %s",
522 identifier_to_locale (cgraph_node_name (e->caller)));
523 error_found = true;
524 }
525 if (!e->indirect_unknown_callee
526 || !e->indirect_info)
527 {
528 error ("An indirect edge from %s is not marked as indirect or has "
529 "associated indirect_info, the corresponding statement is: ",
530 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 531 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 532 error_found = true;
533 }
534 }
b0cdf642 535 for (e = node->callers; e; e = e->next_caller)
536 {
1a036a3b 537 if (verify_edge_count_and_frequency (e))
538 error_found = true;
b0cdf642 539 if (!e->inline_failed)
540 {
541 if (node->global.inlined_to
542 != (e->caller->global.inlined_to
543 ? e->caller->global.inlined_to : e->caller))
544 {
0a81f5a0 545 error ("inlined_to pointer is wrong");
b0cdf642 546 error_found = true;
547 }
548 if (node->callers->next_caller)
549 {
0a81f5a0 550 error ("multiple inline callers");
b0cdf642 551 error_found = true;
552 }
553 }
554 else
555 if (node->global.inlined_to)
556 {
0a81f5a0 557 error ("inlined_to pointer set for noninline callers");
b0cdf642 558 error_found = true;
559 }
560 }
1a036a3b 561 for (e = node->indirect_calls; e; e = e->next_callee)
562 if (verify_edge_count_and_frequency (e))
563 error_found = true;
b0cdf642 564 if (!node->callers && node->global.inlined_to)
565 {
5cd75817 566 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 567 error_found = true;
568 }
569 if (node->global.inlined_to == node)
570 {
0a81f5a0 571 error ("inlined_to pointer refers to itself");
b0cdf642 572 error_found = true;
573 }
574
7019fd3f 575 if (!cgraph_get_node (node->decl))
b0cdf642 576 {
0f6439b9 577 error ("node not found in cgraph_hash");
b0cdf642 578 error_found = true;
579 }
a0c938f0 580
ccf4ab6b 581 if (node->clone_of)
582 {
583 struct cgraph_node *n;
584 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
585 if (n == node)
586 break;
587 if (!n)
588 {
589 error ("node has wrong clone_of");
590 error_found = true;
591 }
592 }
593 if (node->clones)
594 {
595 struct cgraph_node *n;
596 for (n = node->clones; n; n = n->next_sibling_clone)
597 if (n->clone_of != node)
598 break;
599 if (n)
600 {
601 error ("node has wrong clone list");
602 error_found = true;
603 }
604 }
605 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
606 {
607 error ("node is in clone list but it is not clone");
608 error_found = true;
609 }
610 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
611 {
612 error ("node has wrong prev_clone pointer");
613 error_found = true;
614 }
615 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
616 {
617 error ("double linked list of clones corrupted");
618 error_found = true;
619 }
c524ac5d 620 if (node->same_comdat_group)
621 {
622 struct cgraph_node *n = node->same_comdat_group;
623
624 if (!DECL_ONE_ONLY (node->decl))
625 {
626 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
627 error_found = true;
628 }
629 if (n == node)
630 {
631 error ("node is alone in a comdat group");
632 error_found = true;
633 }
634 do
635 {
636 if (!n->same_comdat_group)
637 {
638 error ("same_comdat_group is not a circular list");
639 error_found = true;
640 break;
641 }
642 n = n->same_comdat_group;
643 }
644 while (n != node);
645 }
ccf4ab6b 646
c70f46b0 647 if (node->analyzed && node->alias)
648 {
649 bool ref_found = false;
650 int i;
651 struct ipa_ref *ref;
652
653 if (node->callees)
654 {
655 error ("Alias has call edges");
656 error_found = true;
657 }
658 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
659 if (ref->use != IPA_REF_ALIAS)
660 {
661 error ("Alias has non-alias reference");
662 error_found = true;
663 }
664 else if (ref_found)
665 {
666 error ("Alias has more than one alias reference");
667 error_found = true;
668 }
669 else
670 ref_found = true;
671 if (!ref_found)
672 {
673 error ("Analyzed alias has no reference");
674 error_found = true;
675 }
676 }
91bf9d9a 677 if (node->analyzed && node->thunk.thunk_p)
678 {
679 if (!node->callees)
680 {
681 error ("No edge out of thunk node");
682 error_found = true;
683 }
684 else if (node->callees->next_callee)
685 {
686 error ("More than one edge out of thunk node");
687 error_found = true;
688 }
689 if (gimple_has_body_p (node->decl))
690 {
691 error ("Thunk is not supposed to have body");
692 error_found = true;
693 }
694 }
695 else if (node->analyzed && gimple_has_body_p (node->decl)
696 && !TREE_ASM_WRITTEN (node->decl)
697 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
698 && !flag_wpa)
b0cdf642 699 {
e27482aa 700 if (this_cfun->cfg)
701 {
702 /* The nodes we're interested in are never shared, so walk
703 the tree ignoring duplicates. */
e7c352d1 704 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 705 /* Reach the trees by walking over the CFG, and note the
706 enclosing basic-blocks in the call edges. */
707 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 708 for (gsi = gsi_start_bb (this_block);
709 !gsi_end_p (gsi);
710 gsi_next (&gsi))
9bfec7c2 711 {
75a70cf9 712 gimple stmt = gsi_stmt (gsi);
799c8711 713 if (is_gimple_call (stmt))
9bfec7c2 714 {
715 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 716 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 717 if (e)
718 {
719 if (e->aux)
720 {
0a81f5a0 721 error ("shared call_stmt:");
7b29dd2f 722 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 723 error_found = true;
724 }
799c8711 725 if (!e->indirect_unknown_callee)
28454517 726 {
2f9d66d3 727 if (verify_edge_corresponds_to_fndecl (e, decl))
799c8711 728 {
729 error ("edge points to wrong declaration:");
730 debug_tree (e->callee->decl);
731 fprintf (stderr," Instead of:");
732 debug_tree (decl);
733 error_found = true;
734 }
28454517 735 }
799c8711 736 else if (decl)
9bfec7c2 737 {
799c8711 738 error ("an indirect edge with unknown callee "
739 "corresponding to a call_stmt with "
740 "a known declaration:");
ee3f5fc0 741 error_found = true;
7b29dd2f 742 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
9bfec7c2 743 }
744 e->aux = (void *)1;
745 }
799c8711 746 else if (decl)
9bfec7c2 747 {
0a81f5a0 748 error ("missing callgraph edge for call stmt:");
7b29dd2f 749 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 750 error_found = true;
751 }
752 }
753 }
e27482aa 754 pointer_set_destroy (visited_nodes);
e27482aa 755 }
756 else
757 /* No CFG available?! */
758 gcc_unreachable ();
759
b0cdf642 760 for (e = node->callees; e; e = e->next_callee)
761 {
799c8711 762 if (!e->aux)
b0cdf642 763 {
0a81f5a0 764 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 765 identifier_to_locale (cgraph_node_name (e->caller)),
766 identifier_to_locale (cgraph_node_name (e->callee)));
7b29dd2f 767 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
b0cdf642 768 error_found = true;
769 }
770 e->aux = 0;
771 }
799c8711 772 for (e = node->indirect_calls; e; e = e->next_callee)
773 {
774 if (!e->aux)
775 {
776 error ("an indirect edge from %s has no corresponding call_stmt",
777 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 778 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 779 error_found = true;
780 }
781 e->aux = 0;
782 }
b0cdf642 783 }
784 if (error_found)
785 {
786 dump_cgraph_node (stderr, node);
0a81f5a0 787 internal_error ("verify_cgraph_node failed");
b0cdf642 788 }
789 timevar_pop (TV_CGRAPH_VERIFY);
790}
791
792/* Verify whole cgraph structure. */
4b987fac 793DEBUG_FUNCTION void
b0cdf642 794verify_cgraph (void)
795{
796 struct cgraph_node *node;
797
852f689e 798 if (seen_error ())
8ec2a798 799 return;
800
b0cdf642 801 for (node = cgraph_nodes; node; node = node->next)
802 verify_cgraph_node (node);
803}
804
56af936e 805/* Output all asm statements we have stored up to be output. */
806
807static void
808cgraph_output_pending_asms (void)
809{
810 struct cgraph_asm_node *can;
811
852f689e 812 if (seen_error ())
56af936e 813 return;
814
815 for (can = cgraph_asm_nodes; can; can = can->next)
816 assemble_asm (can->asm_str);
817 cgraph_asm_nodes = NULL;
818}
819
0785e435 820/* Analyze the function scheduled to be output. */
222bc9b9 821void
0785e435 822cgraph_analyze_function (struct cgraph_node *node)
823{
bfec3452 824 tree save = current_function_decl;
0785e435 825 tree decl = node->decl;
826
c70f46b0 827 if (node->alias && node->thunk.alias)
828 {
829 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
830 if (!VEC_length (ipa_ref_t, node->ref_list.references))
831 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
832 if (node->same_body_alias)
833 {
834 DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
835 DECL_DECLARED_INLINE_P (node->decl)
836 = DECL_DECLARED_INLINE_P (node->thunk.alias);
837 DECL_DISREGARD_INLINE_LIMITS (node->decl)
838 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
839 }
840
841 /* Fix up the visibility nonsense the C++ front end produces on same body aliases. */
842 if (TREE_PUBLIC (node->decl) && node->same_body_alias)
843 {
844 DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
89bf5ca9 845 if (DECL_ONE_ONLY (node->thunk.alias))
c70f46b0 846 {
89bf5ca9 847 DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
c70f46b0 848 DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
849 if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
850 {
851 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
852 node->same_comdat_group = tgt;
853 if (!tgt->same_comdat_group)
854 tgt->same_comdat_group = node;
855 else
856 {
857 struct cgraph_node *n;
858 for (n = tgt->same_comdat_group;
859 n->same_comdat_group != tgt;
860 n = n->same_comdat_group)
861 ;
862 n->same_comdat_group = node;
863 }
864 }
865 }
866 }
867 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
868 if (node->address_taken)
869 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
870 if (cgraph_decide_is_function_needed (node, node->decl))
871 cgraph_mark_needed_node (node);
872 }
873 else if (node->thunk.thunk_p)
91bf9d9a 874 {
875 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
876 NULL, 0, CGRAPH_FREQ_BASE);
877 }
878 else
879 {
880 current_function_decl = decl;
881 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 882
91bf9d9a 883 assign_assembler_name_if_neeeded (node->decl);
6816d0c4 884
91bf9d9a 885 /* Make sure to gimplify bodies only once. While analyzing a
886 function we lower it, which will require gimplified nested
887 functions, so we can end up here with an already gimplified
888 body. */
889 if (!gimple_body (decl))
890 gimplify_function_tree (decl);
891 dump_function (TDI_generic, decl);
bfec3452 892
91bf9d9a 893 cgraph_lower_function (node);
894 pop_cfun ();
895 }
6e8d6e86 896 node->analyzed = true;
0785e435 897
bfec3452 898 current_function_decl = save;
0785e435 899}
900
c70f46b0 901/* The C++ front end produces same body aliases all over the place, even before
902 PCH gets streamed out. It relies on us linking the aliases with their function
903 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
904 first produce aliases without links, but once the C++ FE is sure it won't
905 stream PCH we build the links via this function. */
906
907void
908cgraph_process_same_body_aliases (void)
909{
910 struct cgraph_node *node;
911 for (node = cgraph_nodes; node; node = node->next)
912 if (node->same_body_alias
913 && !VEC_length (ipa_ref_t, node->ref_list.references))
914 {
915 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
916 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
917 }
918 same_body_aliases_done = true;
919}
920
d05db70d 921/* Process attributes common for vars and functions. */
922
923static void
924process_common_attributes (tree decl)
925{
926 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
927
928 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
929 {
930 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
931 "%<weakref%> attribute should be accompanied with"
932 " an %<alias%> attribute");
933 DECL_WEAK (decl) = 0;
40b32d93 934 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
935 DECL_ATTRIBUTES (decl));
d05db70d 936 }
937}
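
/* As an illustration (hypothetical declarations, not from this file), the
   warning above fires for

     static int my_bar (void) __attribute__ ((weakref));

   whereas the intended usage names the target, which implies the alias:

     static int my_bar (void) __attribute__ ((weakref ("bar")));  */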
938
05806473 939/* Look for externally_visible and used attributes and mark cgraph nodes
940 accordingly.
941
942 We cannot mark the nodes at the point the attributes are processed (in
943 handle_*_attribute) because the copy of the declarations available at that
944 point may not be canonical. For example, in:
945
946 void f();
947 void f() __attribute__((used));
948
949 the declaration we see in handle_used_attribute will be the second
950 declaration -- but the front end will subsequently merge that declaration
951 with the original declaration and discard the second declaration.
952
953 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
954
955 void f() {}
956 void f() __attribute__((externally_visible));
957
958 is valid.
959
960 So, we walk the nodes at the end of the translation unit, applying the
961 attributes at that point. */
962
963static void
964process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 965 struct varpool_node *first_var)
05806473 966{
967 struct cgraph_node *node;
1d416bd7 968 struct varpool_node *vnode;
05806473 969
970 for (node = cgraph_nodes; node != first; node = node->next)
971 {
972 tree decl = node->decl;
83a23b05 973 if (DECL_PRESERVE_P (decl))
0b49f8f8 974 cgraph_mark_needed_node (node);
62433d51 975 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
976 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
977 && TREE_PUBLIC (node->decl))
978 {
979 if (node->local.finalized)
980 cgraph_mark_needed_node (node);
981 }
982 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 983 {
ba12ea31 984 if (! TREE_PUBLIC (node->decl))
712d2297 985 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
986 "%<externally_visible%>"
987 " attribute have effect only on public objects");
59dd4830 988 else if (node->local.finalized)
989 cgraph_mark_needed_node (node);
05806473 990 }
40b32d93 991 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
c70f46b0 992 && (node->local.finalized && !node->alias))
40b32d93 993 {
994 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
995 "%<weakref%> attribute ignored"
996 " because function is defined");
997 DECL_WEAK (decl) = 0;
998 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
999 DECL_ATTRIBUTES (decl));
1000 }
a522e9eb 1001
1002 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
1003 && !DECL_DECLARED_INLINE_P (decl)
1004 /* redefining extern inline function makes it DECL_UNINLINABLE. */
1005 && !DECL_UNINLINABLE (decl))
1006 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1007 "always_inline function might not be inlinable");
1008
d05db70d 1009 process_common_attributes (decl);
05806473 1010 }
1d416bd7 1011 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 1012 {
1013 tree decl = vnode->decl;
83a23b05 1014 if (DECL_PRESERVE_P (decl))
05806473 1015 {
22671757 1016 vnode->force_output = true;
05806473 1017 if (vnode->finalized)
1d416bd7 1018 varpool_mark_needed_node (vnode);
05806473 1019 }
62433d51 1020 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1021 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 1022 && TREE_PUBLIC (vnode->decl))
62433d51 1023 {
1024 if (vnode->finalized)
1025 varpool_mark_needed_node (vnode);
1026 }
1027 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 1028 {
ba12ea31 1029 if (! TREE_PUBLIC (vnode->decl))
712d2297 1030 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1031 "%<externally_visible%>"
1032 " attribute have effect only on public objects");
59dd4830 1033 else if (vnode->finalized)
1034 varpool_mark_needed_node (vnode);
05806473 1035 }
40b32d93 1036 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1037 && vnode->finalized
1038 && DECL_INITIAL (decl))
1039 {
1040 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1041 "%<weakref%> attribute ignored"
1042 " because variable is initialized");
1043 DECL_WEAK (decl) = 0;
1044 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1045 DECL_ATTRIBUTES (decl));
1046 }
d05db70d 1047 process_common_attributes (decl);
05806473 1048 }
1049}
1050
aeeb194b 1051/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1052 each reachable function) and build the cgraph.
1053 The function can be called multiple times after inserting new nodes
0d424440 1054 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 1055
aeeb194b 1056static void
1057cgraph_analyze_functions (void)
ae01b312 1058{
c1dcd13c 1059 /* Keep track of already processed nodes when called multiple times for
06b27565 1060 intermodule optimization. */
c1dcd13c 1061 static struct cgraph_node *first_analyzed;
c17d0de1 1062 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1063 static struct varpool_node *first_analyzed_var;
aeeb194b 1064 struct cgraph_node *node, *next;
ae01b312 1065
f1c35659 1066 bitmap_obstack_initialize (NULL);
c17d0de1 1067 process_function_and_variable_attributes (first_processed,
1068 first_analyzed_var);
1069 first_processed = cgraph_nodes;
1d416bd7 1070 first_analyzed_var = varpool_nodes;
1071 varpool_analyze_pending_decls ();
f79b6507 1072 if (cgraph_dump_file)
ae01b312 1073 {
e4200070 1074 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1075 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1076 if (node->needed)
f79b6507 1077 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1078 fprintf (cgraph_dump_file, "\n");
ae01b312 1079 }
aeeb194b 1080 cgraph_process_new_functions ();
ae01b312 1081
e6d2b2d8 1082 /* Propagate reachability flag and lower representation of all reachable
1083 functions. In the future, lowering will introduce new functions and
1084 new entry points on the way (by template instantiation and virtual
1085 method table generation for instance). */
3d7bfc56 1086 while (cgraph_nodes_queue)
ae01b312 1087 {
0785e435 1088 struct cgraph_edge *edge;
3d7bfc56 1089 tree decl = cgraph_nodes_queue->decl;
1090
1091 node = cgraph_nodes_queue;
d87976fb 1092 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1093 node->next_needed = NULL;
ae01b312 1094
638531ad 1095 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 1096 weak alias attribute to kill its body. See
638531ad 1097 gcc.c-torture/compile/20011119-1.c */
91bf9d9a 1098 if (!DECL_STRUCT_FUNCTION (decl)
c70f46b0 1099 && (!node->alias || !node->thunk.alias)
91bf9d9a 1100 && !node->thunk.thunk_p)
9b8fb23a 1101 {
1102 cgraph_reset_node (node);
443089c1 1103 node->local.redefined_extern_inline = true;
9b8fb23a 1104 continue;
1105 }
638531ad 1106
7bfefa9d 1107 if (!node->analyzed)
1108 cgraph_analyze_function (node);
2c0b522d 1109
ae01b312 1110 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1111 if (!edge->callee->reachable)
2c0b522d 1112 cgraph_mark_reachable_node (edge->callee);
91bf9d9a 1113 for (edge = node->callers; edge; edge = edge->next_caller)
1114 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1115 cgraph_mark_reachable_node (edge->caller);
2c0b522d 1116
61c2c7b1 1117 if (node->same_comdat_group)
1118 {
1119 for (next = node->same_comdat_group;
1120 next != node;
1121 next = next->same_comdat_group)
1122 cgraph_mark_reachable_node (next);
1123 }
1124
d544ceff 1125 /* If decl is a clone of an abstract function, mark that abstract
1126 function so that we don't release its body. The DECL_INITIAL() of that
fd6a3c41 1127 abstract function declaration will be later needed to output debug
1128 info. */
d544ceff 1129 if (DECL_ABSTRACT_ORIGIN (decl))
1130 {
fd6a3c41 1131 struct cgraph_node *origin_node;
1132 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
d544ceff 1133 origin_node->abstract_and_needed = true;
1134 }
1135
c17d0de1 1136 /* We finalize local static variables while constructing callgraph
1137 edges. Process their attributes too. */
1138 process_function_and_variable_attributes (first_processed,
1139 first_analyzed_var);
1140 first_processed = cgraph_nodes;
1d416bd7 1141 first_analyzed_var = varpool_nodes;
1142 varpool_analyze_pending_decls ();
aeeb194b 1143 cgraph_process_new_functions ();
ae01b312 1144 }
2c0b522d 1145
aa5e06c7 1146 /* Collect entry points to the unit. */
f79b6507 1147 if (cgraph_dump_file)
3d7bfc56 1148 {
e4200070 1149 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1150 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1151 if (node->needed)
f79b6507 1152 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1153 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1154 dump_cgraph (cgraph_dump_file);
7410370b 1155 dump_varpool (cgraph_dump_file);
3d7bfc56 1156 }
e6d2b2d8 1157
f79b6507 1158 if (cgraph_dump_file)
1159 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1160
f4ec5ce1 1161 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1162 {
1163 tree decl = node->decl;
f4ec5ce1 1164 next = node->next;
ae01b312 1165
91bf9d9a 1166 if (node->local.finalized && !gimple_has_body_p (decl)
c70f46b0 1167 && (!node->alias || !node->thunk.alias)
91bf9d9a 1168 && !node->thunk.thunk_p)
a0c938f0 1169 cgraph_reset_node (node);
9b8fb23a 1170
91bf9d9a 1171 if (!node->reachable
c70f46b0 1172 && (gimple_has_body_p (decl) || node->thunk.thunk_p
1173 || (node->alias && node->thunk.alias)))
ae01b312 1174 {
f79b6507 1175 if (cgraph_dump_file)
1176 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1177 cgraph_remove_node (node);
9b8fb23a 1178 continue;
ae01b312 1179 }
bc5cab3b 1180 else
1181 node->next_needed = NULL;
91bf9d9a 1182 gcc_assert (!node->local.finalized || node->thunk.thunk_p
c70f46b0 1183 || node->alias
91bf9d9a 1184 || gimple_has_body_p (decl));
9b8fb23a 1185 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1186 }
f79b6507 1187 if (cgraph_dump_file)
e4200070 1188 {
1189 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1190 dump_cgraph (cgraph_dump_file);
7410370b 1191 dump_varpool (cgraph_dump_file);
e4200070 1192 }
f1c35659 1193 bitmap_obstack_release (NULL);
c1dcd13c 1194 first_analyzed = cgraph_nodes;
ae01b312 1195 ggc_collect ();
aeeb194b 1196}
1197
3a849bc1 1198/* Translate the ugly representation of aliases as alias pairs into a nice
1199 representation in the callgraph. We don't handle all cases yet,
1200 unfortunately. */
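/* Illustrative example (hypothetical names): a pair produced for

     int real_fn (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("real_fn")));

   arrives here with p->decl == alias_fn and p->target == "real_fn", and is
   turned into a callgraph alias below.  */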
1201
1202static void
1203handle_alias_pairs (void)
1204{
1205 alias_pair *p;
1206 unsigned i;
1207 struct cgraph_node *target_node;
1208 struct cgraph_node *src_node;
e0eaac80 1209 struct varpool_node *target_vnode;
3a849bc1 1210
1211 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1212 {
1213 if (TREE_CODE (p->decl) == FUNCTION_DECL
1214 && !lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl))
1215 && (target_node = cgraph_node_for_asm (p->target)) != NULL)
1216 {
1217 src_node = cgraph_get_node (p->decl);
1218 if (src_node && src_node->local.finalized)
1219 cgraph_reset_node (src_node);
1220 /* Normally EXTERNAL flag is used to mark external inlines,
1221 however for aliases it seems to be allowed to use it w/o
1222 any meaning. See gcc.dg/attr-alias-3.c
1223 However for weakref we insist on EXTERNAL flag being set.
1224 See gcc.dg/attr-alias-5.c */
1225 if (DECL_EXTERNAL (p->decl))
1226 DECL_EXTERNAL (p->decl) = 0;
1227 cgraph_create_function_alias (p->decl, target_node->decl);
1228 VEC_unordered_remove (alias_pair, alias_pairs, i);
1229 }
e0eaac80 1230 else if (TREE_CODE (p->decl) == VAR_DECL
1231 && !lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl))
1232 && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
1233 {
1234 /* Normally EXTERNAL flag is used to mark external inlines,
1235 however for aliases it seems to be allowed to use it w/o
1236 any meaning. See gcc.dg/attr-alias-3.c
1237 However for weakref we insist on EXTERNAL flag being set.
1238 See gcc.dg/attr-alias-5.c */
1239 if (DECL_EXTERNAL (p->decl))
1240 DECL_EXTERNAL (p->decl) = 0;
1241 varpool_create_variable_alias (p->decl, target_vnode->decl);
1242 VEC_unordered_remove (alias_pair, alias_pairs, i);
1243 }
3a849bc1 1244 else
1245 {
1246 if (dump_file)
1247 fprintf (dump_file, "Unhandled alias %s->%s\n",
1248 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
1249 IDENTIFIER_POINTER (p->target));
1250
1251 i++;
1252 }
1253 }
1254}
1255
8f69fd82 1256
aeeb194b 1257/* Analyze the whole compilation unit once it is parsed completely. */
1258
1259void
1260cgraph_finalize_compilation_unit (void)
1261{
9929334e 1262 timevar_push (TV_CGRAPH);
1263
a0605d65 1264 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1265 if (flag_lto)
1266 lto_streamer_hooks_init ();
1267
bb903e9c 1268 /* If we're here there's no current function anymore. Some frontends
1269 are lazy in clearing these. */
1270 current_function_decl = NULL;
1271 set_cfun (NULL);
1272
bfec3452 1273 /* Do not skip analyzing the functions if there were errors; otherwise we
1274 miss diagnostics for the following functions. */
aeeb194b 1275
8f69fd82 1276 /* Emit size functions we didn't inline. */
4189e677 1277 finalize_size_functions ();
8f69fd82 1278
9929334e 1279 /* Mark alias targets necessary and emit diagnostics. */
1280 finish_aliases_1 ();
3a849bc1 1281 handle_alias_pairs ();
9929334e 1282
aeeb194b 1283 if (!quiet_flag)
1284 {
1285 fprintf (stderr, "\nAnalyzing compilation unit\n");
1286 fflush (stderr);
1287 }
1288
ec4791a8 1289 if (flag_dump_passes)
1290 dump_passes ();
1291
9929334e 1292 /* Gimplify and lower all functions, compute reachability and
1293 remove unreachable nodes. */
1294 cgraph_analyze_functions ();
1295
8f69fd82 1296 /* Mark alias targets necessary and emit diagnostics. */
1297 finish_aliases_1 ();
3a849bc1 1298 handle_alias_pairs ();
8f69fd82 1299
9929334e 1300 /* Gimplify and lower thunks. */
aeeb194b 1301 cgraph_analyze_functions ();
bfec3452 1302
9929334e 1303 /* Finally drive the pass manager. */
bfec3452 1304 cgraph_optimize ();
9929334e 1305
1306 timevar_pop (TV_CGRAPH);
ae01b312 1307}
9ed5b1f5 1308
1309
ae01b312 1310/* Figure out what functions we want to assemble. */
1311
1312static void
d9d9733a 1313cgraph_mark_functions_to_output (void)
ae01b312 1314{
1315 struct cgraph_node *node;
61c2c7b1 1316#ifdef ENABLE_CHECKING
1317 bool check_same_comdat_groups = false;
1318
1319 for (node = cgraph_nodes; node; node = node->next)
1320 gcc_assert (!node->process);
1321#endif
ae01b312 1322
ae01b312 1323 for (node = cgraph_nodes; node; node = node->next)
1324 {
1325 tree decl = node->decl;
d7c6d889 1326 struct cgraph_edge *e;
a0c938f0 1327
61c2c7b1 1328 gcc_assert (!node->process || node->same_comdat_group);
1329 if (node->process)
1330 continue;
d7c6d889 1331
1332 for (e = node->callers; e; e = e->next_caller)
611e5405 1333 if (e->inline_failed)
d7c6d889 1334 break;
ae01b312 1335
e6d2b2d8 1336 /* We need to output all local functions that are used and not
1337 always inlined, as well as those that are reachable from
1338 outside the current compilation unit. */
1a1a827a 1339 if (node->analyzed
91bf9d9a 1340 && !node->thunk.thunk_p
c70f46b0 1341 && !node->alias
b0cdf642 1342 && !node->global.inlined_to
1e3aebec 1343 && (!cgraph_only_called_directly_p (node)
c70f46b0 1344 || ((e || ipa_ref_has_aliases_p (&node->ref_list))
1345 && node->reachable))
4ee9c684 1346 && !TREE_ASM_WRITTEN (decl)
ae01b312 1347 && !DECL_EXTERNAL (decl))
61c2c7b1 1348 {
1349 node->process = 1;
1350 if (node->same_comdat_group)
1351 {
1352 struct cgraph_node *next;
1353 for (next = node->same_comdat_group;
1354 next != node;
1355 next = next->same_comdat_group)
c70f46b0 1356 if (!next->thunk.thunk_p && !next->alias)
91bf9d9a 1357 next->process = 1;
61c2c7b1 1358 }
1359 }
1360 else if (node->same_comdat_group)
1361 {
1362#ifdef ENABLE_CHECKING
1363 check_same_comdat_groups = true;
1364#endif
1365 }
cc636d56 1366 else
9cee7c3f 1367 {
1368 /* We should've reclaimed all functions that are not needed. */
1369#ifdef ENABLE_CHECKING
75a70cf9 1370 if (!node->global.inlined_to
1a1a827a 1371 && gimple_has_body_p (decl)
08843223 1372 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1373 are inside partition, we can end up not removing the body since we no longer
1374 have analyzed node pointing to it. */
1375 && !node->in_other_partition
c70f46b0 1376 && !node->alias
9cee7c3f 1377 && !DECL_EXTERNAL (decl))
1378 {
1379 dump_cgraph_node (stderr, node);
1380 internal_error ("failed to reclaim unneeded function");
1381 }
1382#endif
75a70cf9 1383 gcc_assert (node->global.inlined_to
1a1a827a 1384 || !gimple_has_body_p (decl)
08843223 1385 || node->in_other_partition
9cee7c3f 1386 || DECL_EXTERNAL (decl));
1387
1388 }
a0c938f0 1389
961e3b13 1390 }
61c2c7b1 1391#ifdef ENABLE_CHECKING
1392 if (check_same_comdat_groups)
1393 for (node = cgraph_nodes; node; node = node->next)
1394 if (node->same_comdat_group && !node->process)
1395 {
1396 tree decl = node->decl;
1397 if (!node->global.inlined_to
1398 && gimple_has_body_p (decl)
08843223 1399 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1400 are inside partition, we can end up not removing the body since we no longer
1401 have analyzed node pointing to it. */
1402 && !node->in_other_partition
61c2c7b1 1403 && !DECL_EXTERNAL (decl))
1404 {
1405 dump_cgraph_node (stderr, node);
c70f46b0 1406 internal_error ("failed to reclaim unneeded function in same comdat group");
61c2c7b1 1407 }
1408 }
1409#endif
961e3b13 1410}
1411
28454517 1412/* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1413 in lowered gimple form.
1414
1415 Set current_function_decl and cfun to a newly constructed empty function body.
1416 Return the basic block in the function body. */
1417
1418static basic_block
1419init_lowered_empty_function (tree decl)
1420{
1421 basic_block bb;
1422
1423 current_function_decl = decl;
1424 allocate_struct_function (decl, false);
1425 gimple_register_cfg_hooks ();
1426 init_empty_tree_cfg ();
1427 init_tree_ssa (cfun);
1428 init_ssa_operands ();
1429 cfun->gimple_df->in_ssa_p = true;
1430 DECL_INITIAL (decl) = make_node (BLOCK);
1431
1432 DECL_SAVED_TREE (decl) = error_mark_node;
1433 cfun->curr_properties |=
1434 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
657e3a56 1435 PROP_ssa | PROP_gimple_any);
28454517 1436
1437 /* Create BB for body of the function and connect it properly. */
1438 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1439 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1440 make_edge (bb, EXIT_BLOCK_PTR, 0);
1441
1442 return bb;
1443}
1444
1445/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1446 offset indicated by VIRTUAL_OFFSET, if that is
1447 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1448 zero for a result adjusting thunk. */
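/* A worked example (illustrative only): for a this-adjusting thunk with
   fixed_offset == -16 and no virtual offset the code built below amounts to

     adjusted_this = this + (-16);

   and when a virtual offset is also present it is roughly (the vptr sits at
   offset zero in the object)

     p = this + fixed_offset;
     vtable = *(void **) p;
     adjusted_this = p + *(ptrdiff_t *) ((char *) vtable + virtual_offset);

   For a result-adjusting thunk the fixed offset is instead applied after the
   virtual lookup.  */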
1449
1450static tree
1451thunk_adjust (gimple_stmt_iterator * bsi,
1452 tree ptr, bool this_adjusting,
1453 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1454{
1455 gimple stmt;
1456 tree ret;
1457
55d6cb23 1458 if (this_adjusting
1459 && fixed_offset != 0)
28454517 1460 {
1461 stmt = gimple_build_assign (ptr,
1462 fold_build2_loc (input_location,
1463 POINTER_PLUS_EXPR,
1464 TREE_TYPE (ptr), ptr,
1465 size_int (fixed_offset)));
1466 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1467 }
1468
1469 /* If there's a virtual offset, look up that value in the vtable and
1470 adjust the pointer again. */
1471 if (virtual_offset)
1472 {
1473 tree vtabletmp;
1474 tree vtabletmp2;
1475 tree vtabletmp3;
1476 tree offsettmp;
1477
1478 if (!vtable_entry_type)
1479 {
1480 tree vfunc_type = make_node (FUNCTION_TYPE);
1481 TREE_TYPE (vfunc_type) = integer_type_node;
1482 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1483 layout_type (vfunc_type);
1484
1485 vtable_entry_type = build_pointer_type (vfunc_type);
1486 }
1487
1488 vtabletmp =
1489 create_tmp_var (build_pointer_type
1490 (build_pointer_type (vtable_entry_type)), "vptr");
1491
1492 /* The vptr is always at offset zero in the object. */
1493 stmt = gimple_build_assign (vtabletmp,
1494 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1495 ptr));
1496 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1497 mark_symbols_for_renaming (stmt);
1498 find_referenced_vars_in (stmt);
1499
1500 /* Form the vtable address. */
1501 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1502 "vtableaddr");
1503 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1504 build_simple_mem_ref (vtabletmp));
28454517 1505 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1506 mark_symbols_for_renaming (stmt);
1507 find_referenced_vars_in (stmt);
1508
1509 /* Find the entry with the vcall offset. */
1510 stmt = gimple_build_assign (vtabletmp2,
1511 fold_build2_loc (input_location,
1512 POINTER_PLUS_EXPR,
1513 TREE_TYPE (vtabletmp2),
1514 vtabletmp2,
1515 fold_convert (sizetype,
1516 virtual_offset)));
1517 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1518
1519 /* Get the offset itself. */
1520 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1521 "vcalloffset");
1522 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1523 build_simple_mem_ref (vtabletmp2));
28454517 1524 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1525 mark_symbols_for_renaming (stmt);
1526 find_referenced_vars_in (stmt);
1527
1528 /* Cast to sizetype. */
1529 offsettmp = create_tmp_var (sizetype, "offset");
1530 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1531 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1532 mark_symbols_for_renaming (stmt);
1533 find_referenced_vars_in (stmt);
1534
1535 /* Adjust the `this' pointer. */
1536 ptr = fold_build2_loc (input_location,
1537 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1538 offsettmp);
1539 }
1540
55d6cb23 1541 if (!this_adjusting
1542 && fixed_offset != 0)
28454517 1543 /* Adjust the pointer by the constant. */
1544 {
1545 tree ptrtmp;
1546
1547 if (TREE_CODE (ptr) == VAR_DECL)
1548 ptrtmp = ptr;
1549 else
1550 {
1551 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1552 stmt = gimple_build_assign (ptrtmp, ptr);
1553 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1554 mark_symbols_for_renaming (stmt);
1555 find_referenced_vars_in (stmt);
1556 }
1557 ptr = fold_build2_loc (input_location,
1558 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1559 size_int (fixed_offset));
1560 }
1561
1562 /* Emit the statement and gimplify the adjustment expression. */
1563 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1564 stmt = gimple_build_assign (ret, ptr);
1565 mark_symbols_for_renaming (stmt);
1566 find_referenced_vars_in (stmt);
1567 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1568
1569 return ret;
1570}
1571
1572/* Produce assembler for thunk NODE. */
1573
1574static void
1575assemble_thunk (struct cgraph_node *node)
1576{
1577 bool this_adjusting = node->thunk.this_adjusting;
1578 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1579 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1580 tree virtual_offset = NULL;
1581 tree alias = node->thunk.alias;
1582 tree thunk_fndecl = node->decl;
1583 tree a = DECL_ARGUMENTS (thunk_fndecl);
1584
1585 current_function_decl = thunk_fndecl;
1586
aed6e608 1587 /* Ensure thunks are emitted in their correct sections. */
1588 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1589
28454517 1590 if (this_adjusting
1591 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1592 virtual_value, alias))
1593 {
1594 const char *fnname;
1595 tree fn_block;
28b2c6a7 1596 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
28454517 1597
1598 DECL_RESULT (thunk_fndecl)
1599 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
28b2c6a7 1600 RESULT_DECL, 0, restype);
22ea3b47 1601 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1602
1603 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1604 create one. */
1605 fn_block = make_node (BLOCK);
1606 BLOCK_VARS (fn_block) = a;
1607 DECL_INITIAL (thunk_fndecl) = fn_block;
1608 init_function_start (thunk_fndecl);
1609 cfun->is_thunk = 1;
1610 assemble_start_function (thunk_fndecl, fnname);
1611
1612 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1613 fixed_offset, virtual_value, alias);
1614
1615 assemble_end_function (thunk_fndecl, fnname);
1616 init_insn_lengths ();
1617 free_after_compilation (cfun);
1618 set_cfun (NULL);
1619 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
91bf9d9a 1620 node->thunk.thunk_p = false;
1621 node->analyzed = false;
28454517 1622 }
1623 else
1624 {
1625 tree restype;
1626 basic_block bb, then_bb, else_bb, return_bb;
1627 gimple_stmt_iterator bsi;
1628 int nargs = 0;
1629 tree arg;
1630 int i;
1631 tree resdecl;
1632 tree restmp = NULL;
1633 VEC(tree, heap) *vargs;
1634
1635 gimple call;
1636 gimple ret;
1637
1638 DECL_IGNORED_P (thunk_fndecl) = 1;
1639 bitmap_obstack_initialize (NULL);
1640
1641 if (node->thunk.virtual_offset_p)
1642 virtual_offset = size_int (virtual_value);
1643
1644 /* Build the return declaration for the function. */
1645 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1646 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1647 {
1648 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1649 DECL_ARTIFICIAL (resdecl) = 1;
1650 DECL_IGNORED_P (resdecl) = 1;
1651 DECL_RESULT (thunk_fndecl) = resdecl;
1652 }
1653 else
1654 resdecl = DECL_RESULT (thunk_fndecl);
1655
1656 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1657
1658 bsi = gsi_start_bb (bb);
1659
1660 /* Build call to the function being thunked. */
1661 if (!VOID_TYPE_P (restype))
1662 {
1663 if (!is_gimple_reg_type (restype))
1664 {
1665 restmp = resdecl;
2ab2ce89 1666 add_local_decl (cfun, restmp);
28454517 1667 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1668 }
1669 else
1670 restmp = create_tmp_var_raw (restype, "retval");
1671 }
1672
1767a056 1673 for (arg = a; arg; arg = DECL_CHAIN (arg))
28454517 1674 nargs++;
1675 vargs = VEC_alloc (tree, heap, nargs);
1676 if (this_adjusting)
1677 VEC_quick_push (tree, vargs,
1678 thunk_adjust (&bsi,
1679 a, 1, fixed_offset,
1680 virtual_offset));
1681 else
1682 VEC_quick_push (tree, vargs, a);
1767a056 1683 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
28454517 1684 VEC_quick_push (tree, vargs, arg);
1685 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1686 VEC_free (tree, heap, vargs);
1687 gimple_call_set_cannot_inline (call, true);
1688 gimple_call_set_from_thunk (call, true);
1689 if (restmp)
1690 gimple_call_set_lhs (call, restmp);
1691 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1692 mark_symbols_for_renaming (call);
1693 find_referenced_vars_in (call);
1694 update_stmt (call);
1695
1696 if (restmp && !this_adjusting)
1697 {
57ab8ec3 1698 tree true_label = NULL_TREE;
28454517 1699
1700 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1701 {
1702 gimple stmt;
1703 /* If the return type is a pointer, we need to
1704 protect against NULL. We know there will be an
1705 adjustment, because that's why we're emitting a
1706 thunk. */
1707 then_bb = create_basic_block (NULL, (void *) 0, bb);
1708 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1709 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1710 remove_edge (single_succ_edge (bb));
1711 true_label = gimple_block_label (then_bb);
28454517 1712 stmt = gimple_build_cond (NE_EXPR, restmp,
385f3f36 1713 build_zero_cst (TREE_TYPE (restmp)),
28454517 1714 NULL_TREE, NULL_TREE);
1715 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1716 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1717 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1718 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1719 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1720 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1721 bsi = gsi_last_bb (then_bb);
1722 }
1723
1724 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1725 fixed_offset, virtual_offset);
1726 if (true_label)
1727 {
1728 gimple stmt;
1729 bsi = gsi_last_bb (else_bb);
385f3f36 1730 stmt = gimple_build_assign (restmp,
1731 build_zero_cst (TREE_TYPE (restmp)));
28454517 1732 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1733 bsi = gsi_last_bb (return_bb);
1734 }
1735 }
1736 else
1737 gimple_call_set_tail (call, true);
1738
1739 /* Build return value. */
1740 ret = gimple_build_return (restmp);
1741 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1742
1743 delete_unreachable_blocks ();
1744 update_ssa (TODO_update_ssa);
1745
28454517 1746 /* Since we want to emit the thunk, we explicitly mark its name as
1747 referenced. */
91bf9d9a 1748 node->thunk.thunk_p = false;
1749 cgraph_node_remove_callees (node);
28454517 1750 cgraph_add_new_function (thunk_fndecl, true);
1751 bitmap_obstack_release (NULL);
1752 }
1753 current_function_decl = NULL;
1754}
1755
91bf9d9a 1756
c70f46b0 1757
1758/* Assemble thunks and aliases associated with NODE. */
91bf9d9a 1759
1760static void
c70f46b0 1761assemble_thunks_and_aliases (struct cgraph_node *node)
91bf9d9a 1762{
1763 struct cgraph_edge *e;
c70f46b0 1764 int i;
1765 struct ipa_ref *ref;
1766
91bf9d9a 1767 for (e = node->callers; e;)
1768 if (e->caller->thunk.thunk_p)
1769 {
1770 struct cgraph_node *thunk = e->caller;
1771
1772 e = e->next_caller;
c70f46b0 1773 assemble_thunks_and_aliases (thunk);
91bf9d9a 1774 assemble_thunk (thunk);
1775 }
1776 else
1777 e = e->next_caller;
c70f46b0 1778 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1779 if (ref->use == IPA_REF_ALIAS)
1780 {
1781 struct cgraph_node *alias = ipa_ref_refering_node (ref);
1782 assemble_alias (alias->decl,
1783 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1784 assemble_thunks_and_aliases (alias);
1785 }
91bf9d9a 1786}
1787
ae01b312 1788/* Expand function specified by NODE. */
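/* Besides generating RTL via tree_rest_of_compilation, expansion releases
   the GIMPLE body and removes the outgoing call edges afterwards; by this
   point no clone may still need the body, which the assert on
   cgraph_preserve_function_body_p below checks.  */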
e6d2b2d8 1789
ae01b312 1790static void
d9d9733a 1791cgraph_expand_function (struct cgraph_node *node)
ae01b312 1792{
1793 tree decl = node->decl;
1794
b0cdf642 1795 /* We ought to not compile any inline clones. */
cc636d56 1796 gcc_assert (!node->global.inlined_to);
b0cdf642 1797
6329636b 1798 announce_function (decl);
09fc9532 1799 node->process = 0;
c70f46b0 1800 assemble_thunks_and_aliases (node);
f7777314 1801 gcc_assert (node->lowered);
1802
1803 /* Generate RTL for the body of DECL. */
1804 tree_rest_of_compilation (decl);
1805
1806 /* Make sure that BE didn't give up on compiling. */
1807 gcc_assert (TREE_ASM_WRITTEN (decl));
1808 current_function_decl = NULL;
cc91b414 1809 gcc_assert (!cgraph_preserve_function_body_p (node));
1a1a827a 1810 cgraph_release_function_body (node);
1811 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1812 points to the dead function body. */
1813 cgraph_node_remove_callees (node);
e1be32b8 1814
1815 cgraph_function_flags_ready = true;
ae01b312 1816}
1817
b0cdf642 1818/* Return true when the call on edge E should be inlined. Store the failure reason into *REASON. */
d7c6d889 1819
1820bool
326a9581 1821cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1822{
b0cdf642 1823 *reason = e->inline_failed;
1824 return !e->inline_failed;
d7c6d889 1825}
b0cdf642 1826
acc70efa 1827
acc70efa 1828
d9d9733a 1829/* Expand all functions that must be output.
1830
d7c6d889 1831 Attempt to topologically sort the nodes so a function is output when
1832 all called functions are already assembled to allow data to be
91c82c20 1833 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1834 between a function and its callees (later we may choose to use a more
d7c6d889 1835 sophisticated algorithm for function reordering; we will likely want
1836 to use subsections to make the output functions appear in top-down
1837 order). */
1838
1839static void
a6868229 1840cgraph_expand_all_functions (void)
d7c6d889 1841{
1842 struct cgraph_node *node;
4c36ffe6 1843 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1844 int order_pos, new_order_pos = 0;
d7c6d889 1845 int i;
1846
7771d558 1847 order_pos = ipa_reverse_postorder (order);
cc636d56 1848 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1849
7bd28bba 1850 /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1851 optimization, so we must be sure not to reference them. */
1852 for (i = 0; i < order_pos; i++)
09fc9532 1853 if (order[i]->process)
b0cdf642 1854 order[new_order_pos++] = order[i];
1855
1856 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1857 {
1858 node = order[i];
09fc9532 1859 if (node->process)
d7c6d889 1860 {
cc636d56 1861 gcc_assert (node->reachable);
09fc9532 1862 node->process = 0;
d7c6d889 1863 cgraph_expand_function (node);
1864 }
1865 }
523c1122 1866 cgraph_process_new_functions ();
773c5ba7 1867
d7c6d889 1868 free (order);
773c5ba7 1869
d7c6d889 1870}
1871
56af936e 1872/* This is used to sort the node types by the cgraph order number. */
1873
0b09525f 1874enum cgraph_order_sort_kind
1875{
1876 ORDER_UNDEFINED = 0,
1877 ORDER_FUNCTION,
1878 ORDER_VAR,
1879 ORDER_ASM
1880};
1881
56af936e 1882struct cgraph_order_sort
1883{
0b09525f 1884 enum cgraph_order_sort_kind kind;
56af936e 1885 union
1886 {
1887 struct cgraph_node *f;
1d416bd7 1888 struct varpool_node *v;
56af936e 1889 struct cgraph_asm_node *a;
1890 } u;
1891};
1892
1893/* Output all functions, variables, and asm statements in the order
1894 according to their order fields, which is the order in which they
1895 appeared in the file. This implements -fno-toplevel-reorder. In
1896 this mode we may output functions and variables which don't really
1897 need to be output. */
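/* For example, a unit containing

     int v;
     asm ("# toplevel asm");
     void f (void) {}

   gives v, the asm statement and f consecutive order numbers, so the loops
   below record them in nodes[] as ORDER_VAR, ORDER_ASM and ORDER_FUNCTION
   and the final loop emits them in exactly that source order.  */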
1898
1899static void
1900cgraph_output_in_order (void)
1901{
1902 int max;
56af936e 1903 struct cgraph_order_sort *nodes;
1904 int i;
1905 struct cgraph_node *pf;
1d416bd7 1906 struct varpool_node *pv;
56af936e 1907 struct cgraph_asm_node *pa;
1908
1909 max = cgraph_order;
3e1cde87 1910 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1911
1d416bd7 1912 varpool_analyze_pending_decls ();
56af936e 1913
1914 for (pf = cgraph_nodes; pf; pf = pf->next)
1915 {
c70f46b0 1916 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
56af936e 1917 {
1918 i = pf->order;
1919 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1920 nodes[i].kind = ORDER_FUNCTION;
1921 nodes[i].u.f = pf;
1922 }
1923 }
1924
1d416bd7 1925 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1926 {
1927 i = pv->order;
1928 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1929 nodes[i].kind = ORDER_VAR;
1930 nodes[i].u.v = pv;
1931 }
1932
1933 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1934 {
1935 i = pa->order;
1936 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1937 nodes[i].kind = ORDER_ASM;
1938 nodes[i].u.a = pa;
1939 }
56af936e 1940
304e5318 1941 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1942 for (i = 0; i < max; ++i)
1943 {
1944 if (nodes[i].kind == ORDER_VAR)
1945 {
1946 varpool_mark_needed_node (nodes[i].u.v);
1947 }
1948 }
1949 varpool_empty_needed_queue ();
1950
91da0f1c 1951 for (i = 0; i < max; ++i)
1952 if (nodes[i].kind == ORDER_VAR)
1953 varpool_finalize_named_section_flags (nodes[i].u.v);
1954
56af936e 1955 for (i = 0; i < max; ++i)
1956 {
1957 switch (nodes[i].kind)
1958 {
1959 case ORDER_FUNCTION:
09fc9532 1960 nodes[i].u.f->process = 0;
56af936e 1961 cgraph_expand_function (nodes[i].u.f);
1962 break;
1963
1964 case ORDER_VAR:
1d416bd7 1965 varpool_assemble_decl (nodes[i].u.v);
56af936e 1966 break;
1967
1968 case ORDER_ASM:
1969 assemble_asm (nodes[i].u.a->asm_str);
1970 break;
1971
1972 case ORDER_UNDEFINED:
1973 break;
1974
1975 default:
1976 gcc_unreachable ();
1977 }
1978 }
4b4ea2db 1979
1980 cgraph_asm_nodes = NULL;
3e1cde87 1981 free (nodes);
56af936e 1982}
1983
b0cdf642 1984/* Return true when the function body of NODE still needs to be kept around
1985 for later re-use. */
1986bool
cc91b414 1987cgraph_preserve_function_body_p (struct cgraph_node *node)
b0cdf642 1988{
8d8c4c8d 1989 gcc_assert (cgraph_global_info_ready);
c70f46b0 1990 gcc_assert (!node->alias && !node->thunk.thunk_p);
cc91b414 1991
b0cdf642 1992 /* Look if there is any clone around. */
ccf4ab6b 1993 if (node->clones)
1994 return true;
b0cdf642 1995 return false;
1996}
1997
77fce4cd 1998static void
1999ipa_passes (void)
2000{
87d4aa85 2001 set_cfun (NULL);
4b14adf9 2002 current_function_decl = NULL;
75a70cf9 2003 gimple_register_cfg_hooks ();
77fce4cd 2004 bitmap_obstack_initialize (NULL);
59dd4830 2005
c9036234 2006 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2007
59dd4830 2008 if (!in_lto_p)
7b2e8956 2009 {
2010 execute_ipa_pass_list (all_small_ipa_passes);
2011 if (seen_error ())
2012 return;
2013 }
9ed5b1f5 2014
7bfefa9d 2015 /* If pass_all_early_optimizations was not scheduled, the state of
2016 the cgraph will not be properly updated. Update it now. */
2017 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2018 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 2019
7bfefa9d 2020 if (!in_lto_p)
2021 {
2022 /* Generate coverage variables and constructors. */
2023 coverage_finish ();
2024
2025 /* Process new functions added. */
2026 set_cfun (NULL);
2027 current_function_decl = NULL;
2028 cgraph_process_new_functions ();
7bfefa9d 2029
c9036234 2030 execute_ipa_summary_passes
2031 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 2032 }
23433d72 2033
2034 /* Some targets need to handle LTO assembler output specially. */
2035 if (flag_generate_lto)
2036 targetm.asm_out.lto_start ();
2037
7bfefa9d 2038 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
2039
2040 if (!in_lto_p)
2041 ipa_write_summaries ();
2042
23433d72 2043 if (flag_generate_lto)
2044 targetm.asm_out.lto_end ();
2045
8867b500 2046 if (!flag_ltrans)
2047 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 2048 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 2049
77fce4cd 2050 bitmap_obstack_release (NULL);
2051}
2052
34e5cced 2053
ae01b312 2054/* Perform simple optimizations based on callgraph. */
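/* The overall sequence below: analyze pending varpool decls, run the IPA
   passes, remove unreachable nodes, materialize virtual clones, run the late
   IPA passes, mark functions to output, and then either emit everything in
   source order (for -fno-toplevel-reorder) or expand all functions and
   assemble the remaining variables.  */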
2055
7bfefa9d 2056void
d9d9733a 2057cgraph_optimize (void)
ae01b312 2058{
852f689e 2059 if (seen_error ())
cb2b5570 2060 return;
2061
b0cdf642 2062#ifdef ENABLE_CHECKING
2063 verify_cgraph ();
2064#endif
a861fe52 2065
c1dcd13c 2066 /* Frontend may output common variables after the unit has been finalized.
2067 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 2068 varpool_analyze_pending_decls ();
e9f08e82 2069
f79b6507 2070 timevar_push (TV_CGRAPHOPT);
51949610 2071 if (pre_ipa_mem_report)
2072 {
2073 fprintf (stderr, "Memory consumption before IPA\n");
2074 dump_memory_report (false);
2075 }
d7c6d889 2076 if (!quiet_flag)
cd6bca02 2077 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 2078 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 2079
be4d0974 2080 /* Don't run the IPA passes if there were any error or sorry messages. */
852f689e 2081 if (!seen_error ())
be4d0974 2082 ipa_passes ();
2083
34e5cced 2084 /* Do nothing else if any IPA pass found errors. */
852f689e 2085 if (seen_error ())
021c1c18 2086 {
2087 timevar_pop (TV_CGRAPHOPT);
2088 return;
2089 }
34e5cced 2090
e1be32b8 2091 /* This pass removes bodies of extern inline functions we never inlined.
2092 Do this later so other IPA passes see what is really going on. */
2093 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 2094 cgraph_global_info_ready = true;
f79b6507 2095 if (cgraph_dump_file)
2096 {
e4200070 2097 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 2098 dump_cgraph (cgraph_dump_file);
c1dcd13c 2099 dump_varpool (cgraph_dump_file);
f79b6507 2100 }
51949610 2101 if (post_ipa_mem_report)
2102 {
defa2fa6 2103 fprintf (stderr, "Memory consumption after IPA\n");
51949610 2104 dump_memory_report (false);
2105 }
f79b6507 2106 timevar_pop (TV_CGRAPHOPT);
ae01b312 2107
d7c6d889 2108 /* Output everything. */
47306a5d 2109 (*debug_hooks->assembly_start) ();
e4200070 2110 if (!quiet_flag)
2111 fprintf (stderr, "Assembling functions:\n");
b0cdf642 2112#ifdef ENABLE_CHECKING
2113 verify_cgraph ();
2114#endif
56af936e 2115
ccf4ab6b 2116 cgraph_materialize_all_clones ();
657e3a56 2117 bitmap_obstack_initialize (NULL);
2118 execute_ipa_pass_list (all_late_ipa_passes);
2119 cgraph_remove_unreachable_nodes (true, dump_file);
2120#ifdef ENABLE_CHECKING
2121 verify_cgraph ();
2122#endif
2123 bitmap_obstack_release (NULL);
acc70efa 2124 cgraph_mark_functions_to_output ();
c1dcd13c 2125
523c1122 2126 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 2127 if (!flag_toplevel_reorder)
2128 cgraph_output_in_order ();
2129 else
2130 {
2131 cgraph_output_pending_asms ();
2132
2133 cgraph_expand_all_functions ();
1d416bd7 2134 varpool_remove_unreferenced_decls ();
56af936e 2135
1d416bd7 2136 varpool_assemble_pending_decls ();
56af936e 2137 }
523c1122 2138 cgraph_process_new_functions ();
2139 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 2140
f79b6507 2141 if (cgraph_dump_file)
2142 {
e4200070 2143 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 2144 dump_cgraph (cgraph_dump_file);
7410370b 2145 dump_varpool (cgraph_dump_file);
f79b6507 2146 }
b0cdf642 2147#ifdef ENABLE_CHECKING
2148 verify_cgraph ();
4ee9c684 2149 /* Double check that all inline clones are gone and that all
2150 function bodies have been released from memory. */
852f689e 2151 if (!seen_error ())
4ee9c684 2152 {
2153 struct cgraph_node *node;
2154 bool error_found = false;
2155
2156 for (node = cgraph_nodes; node; node = node->next)
2157 if (node->analyzed
2158 && (node->global.inlined_to
1a1a827a 2159 || gimple_has_body_p (node->decl)))
4ee9c684 2160 {
2161 error_found = true;
2162 dump_cgraph_node (stderr, node);
a0c938f0 2163 }
4ee9c684 2164 if (error_found)
c04e3894 2165 internal_error ("nodes with unreleased memory found");
4ee9c684 2166 }
b0cdf642 2167#endif
ae01b312 2168}
34e5cced 2169
121f3051 2170void
2171init_cgraph (void)
2172{
01ec0a6c 2173 if (!cgraph_dump_file)
2174 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 2175}
b5d36404 2176
a0c938f0 2177/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2178 fixed by cgraph_function_versioning (); now the call_expr in their
2179 respective tree code should be updated to call the NEW_VERSION. */
2180
2181static void
2182update_call_expr (struct cgraph_node *new_version)
2183{
2184 struct cgraph_edge *e;
2185
2186 gcc_assert (new_version);
75a70cf9 2187
2188 /* Update the call expr on the edges to call the new version. */
b5d36404 2189 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2190 {
2191 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2192 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2193 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2194 }
b5d36404 2195}
2196
2197
2198/* Create a new cgraph node which is the new version of
2199 the OLD_VERSION node. REDIRECT_CALLERS holds the caller
2200 edges which should be redirected to point to
2201 NEW_VERSION. All the callee edges of OLD_VERSION
2202 are cloned to the new version node. Return the new
b06ab5fa 2203 version node.
2204
2205 If non-NULL, BBS_TO_COPY determines which basic blocks
2206 are copied, to prevent duplication of calls that are dead
2207 in the clone. */
b5d36404 2208
2209static struct cgraph_node *
2210cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2211 tree new_decl,
b06ab5fa 2212 VEC(cgraph_edge_p,heap) *redirect_callers,
2213 bitmap bbs_to_copy)
2214 {
b5d36404 2215 struct cgraph_node *new_version;
32936803 2216 struct cgraph_edge *e;
b5d36404 2217 unsigned i;
2218
2219 gcc_assert (old_version);
a0c938f0 2220
5a90471f 2221 new_version = cgraph_create_node (new_decl);
b5d36404 2222
2223 new_version->analyzed = true;
2224 new_version->local = old_version->local;
a70a5e2c 2225 new_version->local.externally_visible = false;
2226 new_version->local.local = true;
b5d36404 2227 new_version->global = old_version->global;
a93f1c3b 2228 new_version->rtl = old_version->rtl;
b5d36404 2229 new_version->reachable = true;
2230 new_version->count = old_version->count;
2231
a70a5e2c 2232 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2233 if (!bbs_to_copy
2234 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2235 cgraph_clone_edge (e, new_version, e->call_stmt,
2236 e->lto_stmt_uid, REG_BR_PROB_BASE,
2237 CGRAPH_FREQ_BASE,
0835ad03 2238 true);
a70a5e2c 2239 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2240 if (!bbs_to_copy
2241 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2242 cgraph_clone_edge (e, new_version, e->call_stmt,
2243 e->lto_stmt_uid, REG_BR_PROB_BASE,
2244 CGRAPH_FREQ_BASE,
0835ad03 2245 true);
48148244 2246 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
4460a647 2247 {
2248 /* Redirect calls to the old version node to point to its new
2249 version. */
2250 cgraph_redirect_edge_callee (e, new_version);
2251 }
b5d36404 2252
2253 return new_version;
2254 }
2255
2256 /* Perform function versioning.
a0c938f0 2257 Function versioning includes copying of the tree and
b5d36404 2258 a callgraph update (creating a new cgraph node and updating
2259 its callees and callers).
2260
2261 REDIRECT_CALLERS varray includes the edges to be redirected
2262 to the new version.
2263
2264 TREE_MAP is a mapping of tree nodes we want to replace with
2265 new ones (according to results of prior analysis).
2266 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2267 It returns the new version's cgraph node.
b06ab5fa 2268 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2269 from the new version.
2270 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2271 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone. */
b5d36404 2272
2273struct cgraph_node *
2274cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2275 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2276 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2277 bitmap args_to_skip,
b06ab5fa 2278 bitmap bbs_to_copy,
2279 basic_block new_entry_block,
a70a5e2c 2280 const char *clone_name)
b5d36404 2281{
2282 tree old_decl = old_version_node->decl;
2283 struct cgraph_node *new_version_node = NULL;
2284 tree new_decl;
2285
2286 if (!tree_versionable_function_p (old_decl))
2287 return NULL;
2288
3c97c75d 2289 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2290
b5d36404 2291 /* Make a new FUNCTION_DECL tree node for the
2292 new version. */
5afe38fe 2293 if (!args_to_skip)
2294 new_decl = copy_node (old_decl);
2295 else
2296 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2297
df0b8dfb 2298 /* Generate a new name for the new version. */
2299 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2300 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2301 SET_DECL_RTL (new_decl, NULL);
2302
b5d36404 2303 /* Create the new version's call-graph node
2304 and update the edges of the new node. */
2305 new_version_node =
2306 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2307 redirect_callers, bbs_to_copy);
b5d36404 2308
2309 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2310 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2311 bbs_to_copy, new_entry_block);
b5d36404 2312
a0c938f0 2313 /* Update the new version's properties.
e03a95e7 2314 Make the new version visible only within this translation unit. Make sure
2315 it is not weak either.
a0c938f0 2316 ??? We cannot use COMDAT linkage because there is no
b5d36404 2317 ABI support for this. */
6137cc9f 2318 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2319 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2320 new_version_node->local.externally_visible = 0;
2321 new_version_node->local.local = 1;
2322 new_version_node->lowered = true;
f014e39d 2323
e03a95e7 2324 /* Update the call_expr on the edges to call the new version node. */
2325 update_call_expr (new_version_node);
48e1416a 2326
50828ed8 2327 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2328 return new_version_node;
2329}
469679ab 2330
ccf4ab6b 2331/* Given a virtual clone, turn it into an actual clone. */
2332static void
2333cgraph_materialize_clone (struct cgraph_node *node)
2334{
2335 bitmap_obstack_initialize (NULL);
e748b31d 2336 node->former_clone_of = node->clone_of->decl;
2337 if (node->clone_of->former_clone_of)
2338 node->former_clone_of = node->clone_of->former_clone_of;
ccf4ab6b 2339 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2340 tree_function_versioning (node->clone_of->decl, node->decl,
2341 node->clone.tree_map, true,
b06ab5fa 2342 node->clone.args_to_skip, NULL, NULL);
e20422ea 2343 if (cgraph_dump_file)
2344 {
2345 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2346 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2347 }
ccf4ab6b 2348
2349 /* The function is no longer a clone. */
2350 if (node->next_sibling_clone)
2351 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2352 if (node->prev_sibling_clone)
2353 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2354 else
2355 node->clone_of->clones = node->next_sibling_clone;
2356 node->next_sibling_clone = NULL;
2357 node->prev_sibling_clone = NULL;
6d1cc52c 2358 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2359 {
2360 cgraph_release_function_body (node->clone_of);
2361 cgraph_node_remove_callees (node->clone_of);
2362 ipa_remove_all_references (&node->clone_of->ref_list);
2363 }
ccf4ab6b 2364 node->clone_of = NULL;
2365 bitmap_obstack_release (NULL);
2366}
2367
c596d830 2368/* If necessary, change the function declaration in the call statement
2369 associated with E so that it corresponds to the edge callee. */
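/* For instance, when the callee is a clone whose combined_args_to_skip has
   bit 1 set, a call foo (a, b, c) on this edge is rewritten below through
   gimple_call_copy_skip_args into a call of the clone with the second
   argument dropped, i.e. foo.clone (a, c).  */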
2370
2371gimple
2372cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2373{
2374 tree decl = gimple_call_fndecl (e->call_stmt);
2375 gimple new_stmt;
3fd0ca33 2376 gimple_stmt_iterator gsi;
2377 bool gsi_computed = false;
1f449108 2378#ifdef ENABLE_CHECKING
2379 struct cgraph_node *node;
2380#endif
c596d830 2381
1caef38b 2382 if (e->indirect_unknown_callee
2383 || decl == e->callee->decl
c596d830 2384 /* Don't update call from same body alias to the real function. */
1caef38b 2385 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
c596d830 2386 return e->call_stmt;
2387
1f449108 2388#ifdef ENABLE_CHECKING
1caef38b 2389 if (decl)
2390 {
2391 node = cgraph_get_node (decl);
2392 gcc_assert (!node || !node->clone.combined_args_to_skip);
2393 }
1f449108 2394#endif
e748b31d 2395
c596d830 2396 if (cgraph_dump_file)
2397 {
2398 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2399 cgraph_node_name (e->caller), e->caller->uid,
2400 cgraph_node_name (e->callee), e->callee->uid);
2401 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2402 if (e->callee->clone.combined_args_to_skip)
91aba934 2403 {
2404 fprintf (cgraph_dump_file, " combined args to skip: ");
2405 dump_bitmap (cgraph_dump_file,
2406 e->callee->clone.combined_args_to_skip);
e748b31d 2407 }
c596d830 2408 }
2409
9bab6a70 2410 if (e->indirect_info
2411 && e->indirect_info->thunk_delta != 0
3fd0ca33 2412 && (!e->callee->clone.combined_args_to_skip
2413 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2414 {
2415 if (cgraph_dump_file)
9bab6a70 2416 fprintf (cgraph_dump_file, " Thunk delta is "
2417 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
3fd0ca33 2418 gsi = gsi_for_stmt (e->call_stmt);
2419 gsi_computed = true;
9bab6a70 2420 gimple_adjust_this_by_delta (&gsi,
2421 build_int_cst (sizetype,
2422 e->indirect_info->thunk_delta));
2423 e->indirect_info->thunk_delta = 0;
3fd0ca33 2424 }
2425
c596d830 2426 if (e->callee->clone.combined_args_to_skip)
91aba934 2427 {
092cd838 2428 int lp_nr;
91aba934 2429
2430 new_stmt
2431 = gimple_call_copy_skip_args (e->call_stmt,
2432 e->callee->clone.combined_args_to_skip);
75c7f5a5 2433 gimple_call_set_fndecl (new_stmt, e->callee->decl);
91aba934 2434
2435 if (gimple_vdef (new_stmt)
2436 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2437 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2438
3fd0ca33 2439 if (!gsi_computed)
2440 gsi = gsi_for_stmt (e->call_stmt);
9126b675 2441 gsi_replace (&gsi, new_stmt, false);
092cd838 2442 /* We need to defer cleaning EH info on the new statement to
2443 fixup-cfg. We may not have dominator information at this point
2444 and thus would end up with unreachable blocks and have no way
2445 to communicate that we need to run CFG cleanup then. */
2446 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2447 if (lp_nr != 0)
2448 {
2449 remove_stmt_from_eh_lp (e->call_stmt);
2450 add_stmt_to_eh_lp (new_stmt, lp_nr);
2451 }
91aba934 2452 }
c596d830 2453 else
75c7f5a5 2454 {
2455 new_stmt = e->call_stmt;
2456 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2457 update_stmt (new_stmt);
2458 }
c596d830 2459
c596d830 2460 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2461
2462 if (cgraph_dump_file)
2463 {
2464 fprintf (cgraph_dump_file, " updated to:");
2465 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2466 }
2467 return new_stmt;
2468}
2469
ccf4ab6b 2470/* Once all functions from the compilation unit are in memory, produce all clones
c596d830 2471 and update all calls. We might also do this on demand if we don't want to
2472 bring all functions into memory prior to compilation, but the current WHOPR
2473 implementation does that and it is a bit easier to keep everything right in
2474 this order. */
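/* The materialization loop below iterates until a sweep makes no progress:
   materializing one clone gives its decl a gimple body, which can in turn
   make clones of that clone materializable on the next sweep.  */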
ccf4ab6b 2475void
2476cgraph_materialize_all_clones (void)
2477{
2478 struct cgraph_node *node;
2479 bool stabilized = false;
2480
2481 if (cgraph_dump_file)
2482 fprintf (cgraph_dump_file, "Materializing clones\n");
2483#ifdef ENABLE_CHECKING
2484 verify_cgraph ();
2485#endif
2486
2487 /* We could also process nodes in topological order, but the number of
2488 iterations should be bounded by the number of IPA passes, since a single
2489 IPA pass is probably not going to create clones of clones it created itself. */
2490 while (!stabilized)
2491 {
2492 stabilized = true;
2493 for (node = cgraph_nodes; node; node = node->next)
2494 {
2495 if (node->clone_of && node->decl != node->clone_of->decl
2496 && !gimple_has_body_p (node->decl))
2497 {
2498 if (gimple_has_body_p (node->clone_of->decl))
2499 {
2500 if (cgraph_dump_file)
e20422ea 2501 {
0a10fd82 2502 fprintf (cgraph_dump_file, "cloning %s to %s\n",
e20422ea 2503 cgraph_node_name (node->clone_of),
2504 cgraph_node_name (node));
2505 if (node->clone.tree_map)
2506 {
2507 unsigned int i;
2508 fprintf (cgraph_dump_file, " replace map: ");
2509 for (i = 0; i < VEC_length (ipa_replace_map_p,
2510 node->clone.tree_map);
2511 i++)
2512 {
2513 struct ipa_replace_map *replace_info;
2514 replace_info = VEC_index (ipa_replace_map_p,
2515 node->clone.tree_map,
2516 i);
2517 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2518 fprintf (cgraph_dump_file, " -> ");
2519 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2520 fprintf (cgraph_dump_file, "%s%s;",
2521 replace_info->replace_p ? "(replace)":"",
2522 replace_info->ref_p ? "(ref)":"");
2523 }
2524 fprintf (cgraph_dump_file, "\n");
2525 }
2526 if (node->clone.args_to_skip)
2527 {
2528 fprintf (cgraph_dump_file, " args_to_skip: ");
2529 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2530 }
2531 if (node->clone.combined_args_to_skip)
2532 {
2533 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2534 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2535 }
2536 }
ccf4ab6b 2537 cgraph_materialize_clone (node);
a510bd8d 2538 stabilized = false;
ccf4ab6b 2539 }
ccf4ab6b 2540 }
2541 }
2542 }
ee3f5fc0 2543 for (node = cgraph_nodes; node; node = node->next)
2544 if (!node->analyzed && node->callees)
2545 cgraph_node_remove_callees (node);
c596d830 2546 if (cgraph_dump_file)
2547 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2548#ifdef ENABLE_CHECKING
2549 verify_cgraph ();
2550#endif
ccf4ab6b 2551 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2552}
2553
a861fe52 2554#include "gt-cgraphunit.h"