/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main purpose of this file is to act as an interface between the
   tree based front ends and the back end (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing the GCC extern inline
      function extension.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for static
      variables.

    - cgraph_finalize_compilation_unit

      This function is called once a (source level) compilation unit is
      finalized and will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve
      memory usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C front end).

    - cgraph_optimize

      In this unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The back end can then use this information
      to modify calling conventions, do better inlining or similar
      optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by these
      functions.  There should be little need to call them, and all the
      references should be made explicit to the cgraph code.  At present
      these functions are used by the C++ front end to explicitly mark the
      keyed methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood by
      generic code into understandable ones, or alternatively for marking
      callgraph and varpool nodes referenced by them as needed.

      ??? With tree-ssa, genericizing should take place here and we would
      avoid the need for these hooks (replacing them by a genericizing hook).

    Analysis of all functions is deferred to cgraph_finalize_compilation_unit
    and expansion to cgraph_optimize.

    In cgraph_finalize_compilation_unit the reachable functions are
    analyzed.  During analysis the call-graph edges from reachable
    functions are constructed and their destinations are marked as
    reachable.  References to functions and variables are discovered too,
    and variables found to be needed are output to the assembly file.  Via
    the mark_referenced call in assemble_variable, functions referenced by
    static variables are noticed too.

    The intraprocedural information is produced and its existence is
    indicated by global_info_ready.  Once this flag is set it is impossible
    to change a function from !reachable to reachable, and thus
    assemble_variable no longer calls mark_referenced.

    Finally the call-graph is topologically sorted and all reachable
    functions that have not been completely inlined or are not external are
    output.

    ??? It is possible that a reference to a function or variable is
    optimized out.  We cannot deal with this nicely because the topological
    order is not suitable for it.  For tree-ssa we may consider another pass
    doing the optimization and re-discovering reachable functions.

    ??? Reorganize the code so variables are output last and only if they
    really have been referenced by the produced code, so we catch more cases
    where the reference has been optimized out.  */
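
/* A rough sketch of how a front end is expected to drive the interface
   described above (illustrative only; the real call sites live in the
   individual front ends):

     // after the whole body of a function has been parsed:
     cgraph_finalize_function (fndecl, false);   // nested == false here

     // static variables are announced the same way through the varpool
     // counterpart described above;

     // once the (source level) compilation unit is finalized:
     cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit in turn calls cgraph_optimize (see the
   driver near the end of this file), so front ends normally do not call
   cgraph_optimize themselves.  */
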
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
a41f2a28 141#include "ipa-inline.h"
7771d558 142#include "ipa-utils.h"
a0605d65 143#include "lto-streamer.h"
d7c6d889 144
a6868229 145static void cgraph_expand_all_functions (void);
d9d9733a 146static void cgraph_mark_functions_to_output (void);
147static void cgraph_expand_function (struct cgraph_node *);
f788fff2 148static void cgraph_output_pending_asms (void);
25bb88de 149
ecb08119 150FILE *cgraph_dump_file;
121f3051 151
28454517 152/* Used for vtable lookup in thunk adjusting. */
153static GTY (()) tree vtable_entry_type;
154
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or something magic in the system
   configury.  */
2c0b522d 158
7bfefa9d 159bool
160cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 161{
3f82b628 162 /* If the user told us it is used, then it must be so. */
05806473 163 if (node->local.externally_visible)
164 return true;
165
3f82b628 166 /* ??? If the assembler name is set by hand, it is possible to assemble
167 the name later after finalizing the function and the fact is noticed
168 in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
c70f46b0 170 && (!node->thunk.thunk_p && !node->same_body_alias)
3f82b628 171 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
172 return true;
173
55680bef 174 /* With -fkeep-inline-functions we are keeping all inline functions except
175 for extern inline ones. */
176 if (flag_keep_inline_functions
177 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 178 && !DECL_EXTERNAL (decl)
cbd7f5a0 179 && !DECL_DISREGARD_INLINE_LIMITS (decl))
55680bef 180 return true;
181
2c0b522d 182 /* If we decided it was needed before, but at the time we didn't have
183 the body of the function available, then it's still needed. We have
184 to go back and re-check its dependencies now. */
185 if (node->needed)
186 return true;
187
188 /* Externally visible functions must be output. The exception is
a0c938f0 189 COMDAT functions that must be output only when they are needed.
8baa9d15 190
191 When not optimizing, also output the static functions. (see
95da6220 192 PR24561), but don't do so for always_inline functions, functions
0f9238c0 193 declared inline and nested functions. These were optimized out
d3d410e1 194 in the original implementation and it is unclear whether we want
554f2707 195 to change the behavior here. */
bba7ddf8 196 if (((TREE_PUBLIC (decl)
0f9238c0 197 || (!optimize
cbd7f5a0 198 && !DECL_DISREGARD_INLINE_LIMITS (decl)
d3d410e1 199 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 200 && !(DECL_CONTEXT (decl)
201 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 202 && !flag_whole_program
cbcf2791 203 && !flag_lto)
62eec3b4 204 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 205 return true;
206
2c0b522d 207 return false;
208}
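
/* For instance (an illustrative example of the test above): with no callers
   in this translation unit,

     int visible (void) { return 0; }        // TREE_PUBLIC, !DECL_COMDAT
     static int helper (void) { return 0; }  // only output when reachable

   the first function is "needed" (when not using -fwhole-program or -flto),
   while the second is only output if it is reached from some entry point,
   or when not optimizing.  */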
209
/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions into the callgraph so that they look like ordinary
   reachable functions inserted into the callgraph already at construction
   time.  */
213
214bool
215cgraph_process_new_functions (void)
216{
217 bool output = false;
218 tree fndecl;
219 struct cgraph_node *node;
220
0cddb138 221 varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
224 while (cgraph_new_nodes)
225 {
226 node = cgraph_new_nodes;
227 fndecl = node->decl;
228 cgraph_new_nodes = cgraph_new_nodes->next_needed;
229 switch (cgraph_state)
230 {
231 case CGRAPH_STATE_CONSTRUCTION:
232 /* At construction time we just need to finalize function and move
233 it into reachable functions list. */
234
235 node->next_needed = NULL;
236 cgraph_finalize_function (fndecl, false);
237 cgraph_mark_reachable_node (node);
238 output = true;
4f7a1122 239 cgraph_call_function_insertion_hooks (node);
523c1122 240 break;
241
242 case CGRAPH_STATE_IPA:
f517b36e 243 case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */
247
75a70cf9 248 gimple_register_cfg_hooks ();
523c1122 249 if (!node->analyzed)
250 cgraph_analyze_function (node);
251 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
252 current_function_decl = fndecl;
f517b36e 253 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
254 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
255 /* When not optimizing, be sure we run early local passes anyway
256 to expand OMP. */
257 || !optimize)
20099e35 258 execute_pass_list (pass_early_local_passes.pass.sub);
649597af 259 else
a41f2a28 260 compute_inline_parameters (node, true);
523c1122 261 free_dominance_info (CDI_POST_DOMINATORS);
262 free_dominance_info (CDI_DOMINATORS);
263 pop_cfun ();
264 current_function_decl = NULL;
4f7a1122 265 cgraph_call_function_insertion_hooks (node);
523c1122 266 break;
267
268 case CGRAPH_STATE_EXPANSION:
269 /* Functions created during expansion shall be compiled
270 directly. */
09fc9532 271 node->process = 0;
4f7a1122 272 cgraph_call_function_insertion_hooks (node);
523c1122 273 cgraph_expand_function (node);
274 break;
275
276 default:
277 gcc_unreachable ();
278 break;
279 }
0cddb138 280 varpool_analyze_pending_decls ();
523c1122 281 }
282 return output;
283}
284
/* As a GCC extension we allow redefinition of the function.  The semantics
   when the two bodies differ are not well defined.  We replace the old body
   with the new body so that in unit-at-a-time mode we always use the new
   body, while in normal mode we may end up with the old body inlined into
   some functions and the new body expanded and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function but this is difficult to do.  */
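
/* An illustrative example of the redefinition handled here (GNU89
   "extern inline" semantics):

     extern inline int f (void) { return 1; }  // body used only for inlining
     int f (void) { return 2; }                // later redefinition

   The second definition replaces the first, so the node is reset and the
   function is analyzed again from scratch.  */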
294
295static void
296cgraph_reset_node (struct cgraph_node *node)
297{
09fc9532 298 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 299 This is *not* testing for whether we've already emitted the function.
300 That case can be sort-of legitimately seen with real function redefinition
301 errors. I would argue that the front end should never present us with
302 such a case, but don't enforce that for now. */
09fc9532 303 gcc_assert (!node->process);
9b8fb23a 304
305 /* Reset our data structures so we can analyze the function again. */
306 memset (&node->local, 0, sizeof (node->local));
307 memset (&node->global, 0, sizeof (node->global));
308 memset (&node->rtl, 0, sizeof (node->rtl));
309 node->analyzed = false;
9b8fb23a 310 node->local.finalized = false;
311
9b8fb23a 312 cgraph_node_remove_callees (node);
9b8fb23a 313}
c08871a9 314
1e8e9920 315static void
316cgraph_lower_function (struct cgraph_node *node)
317{
318 if (node->lowered)
319 return;
bfec3452 320
321 if (node->nested)
322 lower_nested_functions (node->decl);
323 gcc_assert (!node->nested);
324
1e8e9920 325 tree_lowering_passes (node->decl);
326 node->lowered = true;
327}
328
28df663b 329/* DECL has been parsed. Take it, queue it, compile it at the whim of the
330 logic in effect. If NESTED is true, then our caller cannot stand to have
331 the garbage collector run at the moment. We would need to either create
332 a new GC context, or just not compile right now. */
ae01b312 333
334void
28df663b 335cgraph_finalize_function (tree decl, bool nested)
ae01b312 336{
5a90471f 337 struct cgraph_node *node = cgraph_get_create_node (decl);
ae01b312 338
c08871a9 339 if (node->local.finalized)
443089c1 340 {
341 cgraph_reset_node (node);
342 node->local.redefined_extern_inline = true;
343 }
28df663b 344
c08871a9 345 notice_global_symbol (decl);
79bb87b4 346 node->local.finalized = true;
e27482aa 347 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
ae01b312 348
7bfefa9d 349 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 350 cgraph_mark_needed_node (node);
351
ecda6e51 352 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 353 level unit, we need to be conservative about possible entry points
354 there. */
1e3aebec 355 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
356 || DECL_STATIC_CONSTRUCTOR (decl)
d050bafd 357 || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by a vtable from
	 another compilation unit.  Still, we want to devirtualize calls
	 to them, so we need to analyze them.
	 FIXME: We should introduce "may" edges for this purpose and update
	 their handling in unreachable function removal and the inliner too.  */
91bf9d9a 363 || (DECL_VIRTUAL_P (decl)
364 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
3f82b628 365 cgraph_mark_reachable_node (node);
366
2c0b522d 367 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 368 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 369 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 370
b69eb0ff 371 /* Possibly warn about unused parameters. */
372 if (warn_unused_parameter)
373 do_warn_unused_parameter (decl);
6329636b 374
375 if (!nested)
376 ggc_collect ();
ae01b312 377}
378
/* The C99 extern inline keyword allows changing a declaration after the
   function has been finalized.  We then need to re-decide whether we want
   to mark the function as needed.  */
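
/* An illustrative example (C99 inline semantics):

     inline int f (void) { return 1; }  // finalized as an inline definition
     extern int f (void);               // later declaration: an external
                                        // definition is now required here

   The second declaration is seen after f was already finalized, which is
   why the neededness check has to be re-run at that point.  */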
382
383void
384cgraph_mark_if_needed (tree decl)
385{
fd6a3c41 386 struct cgraph_node *node = cgraph_get_node (decl);
7bfefa9d 387 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 388 cgraph_mark_needed_node (node);
389}
390
ccf4ab6b 391/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
392static bool
393clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
394{
c70f46b0 395 node = cgraph_function_or_thunk_node (node, NULL);
396 node2 = cgraph_function_or_thunk_node (node2, NULL);
ccf4ab6b 397 while (node != node2 && node2)
398 node2 = node2->clone_of;
399 return node2 != NULL;
400}
401
1a036a3b 402/* Verify edge E count and frequency. */
403
404static bool
405verify_edge_count_and_frequency (struct cgraph_edge *e)
406{
407 bool error_found = false;
408 if (e->count < 0)
409 {
410 error ("caller edge count is negative");
411 error_found = true;
412 }
413 if (e->frequency < 0)
414 {
415 error ("caller edge frequency is negative");
416 error_found = true;
417 }
418 if (e->frequency > CGRAPH_FREQ_MAX)
419 {
420 error ("caller edge frequency is too large");
421 error_found = true;
422 }
423 if (gimple_has_body_p (e->caller->decl)
424 && !e->caller->global.inlined_to
      /* FIXME: Inline-analysis sets frequency to 0 when an edge is optimized
	 out.  Remove this once edges are actually removed from the function
	 at that time.  */
427 && (e->frequency
428 || (inline_edge_summary_vec
9ed50dd9 429 && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
430 <= (unsigned) e->uid)
431 || !inline_edge_summary (e)->predicate)))
1a036a3b 432 && (e->frequency
433 != compute_call_stmt_bb_frequency (e->caller->decl,
434 gimple_bb (e->call_stmt))))
435 {
0a10fd82 436 error ("caller edge frequency %i does not match BB frequency %i",
1a036a3b 437 e->frequency,
438 compute_call_stmt_bb_frequency (e->caller->decl,
439 gimple_bb (e->call_stmt)));
440 error_found = true;
441 }
442 return error_found;
443}
444
7b29dd2f 445/* Switch to THIS_CFUN if needed and print STMT to stderr. */
446static void
447cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
448{
449 /* debug_gimple_stmt needs correct cfun */
450 if (cfun != this_cfun)
451 set_cfun (this_cfun);
452 debug_gimple_stmt (stmt);
453}
454
2f9d66d3 455/* Verify that call graph edge E corresponds to DECL from the associated
456 statement. Return true if the verification should fail. */
457
458static bool
459verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
460{
54e8af13 461 struct cgraph_node *node;
462
463 if (!decl || e->callee->global.inlined_to)
464 return false;
465 node = cgraph_get_node (decl);
466
467 /* We do not know if a node from a different partition is an alias or what it
468 aliases and therefore cannot do the former_clone_of check reliably. */
469 if (!node || node->in_other_partition)
470 return false;
471 node = cgraph_function_or_thunk_node (node, NULL);
472
473 if ((e->callee->former_clone_of != node->decl)
      /* IPA-CP sometimes redirects an edge to a clone and then back to the
	 former function.  This ping-pong has to go, eventually.  */
54e8af13 476 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
477 && !clone_of_p (node, e->callee))
2f9d66d3 478 return true;
479 else
480 return false;
481}
482
/* Verify consistency of the given cgraph node.  */
4b987fac 484DEBUG_FUNCTION void
b0cdf642 485verify_cgraph_node (struct cgraph_node *node)
486{
487 struct cgraph_edge *e;
e27482aa 488 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
489 basic_block this_block;
75a70cf9 490 gimple_stmt_iterator gsi;
9bfec7c2 491 bool error_found = false;
b0cdf642 492
852f689e 493 if (seen_error ())
bd09cd3e 494 return;
495
b0cdf642 496 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 497 for (e = node->callees; e; e = e->next_callee)
498 if (e->aux)
499 {
0a81f5a0 500 error ("aux field set for edge %s->%s",
abd3e6b5 501 identifier_to_locale (cgraph_node_name (e->caller)),
502 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 503 error_found = true;
504 }
a2cb9b3b 505 if (node->count < 0)
506 {
bf776685 507 error ("execution count is negative");
a2cb9b3b 508 error_found = true;
509 }
59dd4830 510 if (node->global.inlined_to && node->local.externally_visible)
511 {
bf776685 512 error ("externally visible inline clone");
59dd4830 513 error_found = true;
514 }
515 if (node->global.inlined_to && node->address_taken)
516 {
bf776685 517 error ("inline clone with address taken");
59dd4830 518 error_found = true;
519 }
520 if (node->global.inlined_to && node->needed)
521 {
bf776685 522 error ("inline clone is needed");
59dd4830 523 error_found = true;
524 }
799c8711 525 for (e = node->indirect_calls; e; e = e->next_callee)
526 {
527 if (e->aux)
528 {
529 error ("aux field set for indirect edge from %s",
530 identifier_to_locale (cgraph_node_name (e->caller)));
531 error_found = true;
532 }
533 if (!e->indirect_unknown_callee
534 || !e->indirect_info)
535 {
	  error ("An indirect edge from %s is not marked as indirect or has no "
		 "associated indirect_info, the corresponding statement is: ",
538 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 539 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 540 error_found = true;
541 }
542 }
b0cdf642 543 for (e = node->callers; e; e = e->next_caller)
544 {
1a036a3b 545 if (verify_edge_count_and_frequency (e))
546 error_found = true;
b0cdf642 547 if (!e->inline_failed)
548 {
549 if (node->global.inlined_to
550 != (e->caller->global.inlined_to
551 ? e->caller->global.inlined_to : e->caller))
552 {
0a81f5a0 553 error ("inlined_to pointer is wrong");
b0cdf642 554 error_found = true;
555 }
556 if (node->callers->next_caller)
557 {
0a81f5a0 558 error ("multiple inline callers");
b0cdf642 559 error_found = true;
560 }
561 }
562 else
563 if (node->global.inlined_to)
564 {
0a81f5a0 565 error ("inlined_to pointer set for noninline callers");
b0cdf642 566 error_found = true;
567 }
568 }
1a036a3b 569 for (e = node->indirect_calls; e; e = e->next_callee)
570 if (verify_edge_count_and_frequency (e))
571 error_found = true;
b0cdf642 572 if (!node->callers && node->global.inlined_to)
573 {
5cd75817 574 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 575 error_found = true;
576 }
577 if (node->global.inlined_to == node)
578 {
0a81f5a0 579 error ("inlined_to pointer refers to itself");
b0cdf642 580 error_found = true;
581 }
582
7019fd3f 583 if (!cgraph_get_node (node->decl))
b0cdf642 584 {
0f6439b9 585 error ("node not found in cgraph_hash");
b0cdf642 586 error_found = true;
587 }
a0c938f0 588
ccf4ab6b 589 if (node->clone_of)
590 {
591 struct cgraph_node *n;
592 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
593 if (n == node)
594 break;
595 if (!n)
596 {
597 error ("node has wrong clone_of");
598 error_found = true;
599 }
600 }
601 if (node->clones)
602 {
603 struct cgraph_node *n;
604 for (n = node->clones; n; n = n->next_sibling_clone)
605 if (n->clone_of != node)
606 break;
607 if (n)
608 {
609 error ("node has wrong clone list");
610 error_found = true;
611 }
612 }
613 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
614 {
615 error ("node is in clone list but it is not clone");
616 error_found = true;
617 }
618 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
619 {
620 error ("node has wrong prev_clone pointer");
621 error_found = true;
622 }
623 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
624 {
625 error ("double linked list of clones corrupted");
626 error_found = true;
627 }
c524ac5d 628 if (node->same_comdat_group)
629 {
630 struct cgraph_node *n = node->same_comdat_group;
631
632 if (!DECL_ONE_ONLY (node->decl))
633 {
634 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
635 error_found = true;
636 }
637 if (n == node)
638 {
639 error ("node is alone in a comdat group");
640 error_found = true;
641 }
642 do
643 {
644 if (!n->same_comdat_group)
645 {
646 error ("same_comdat_group is not a circular list");
647 error_found = true;
648 break;
649 }
650 n = n->same_comdat_group;
651 }
652 while (n != node);
653 }
ccf4ab6b 654
c70f46b0 655 if (node->analyzed && node->alias)
656 {
657 bool ref_found = false;
658 int i;
659 struct ipa_ref *ref;
660
661 if (node->callees)
662 {
663 error ("Alias has call edges");
664 error_found = true;
665 }
666 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
667 if (ref->use != IPA_REF_ALIAS)
668 {
	  error ("Alias has non-alias reference");
670 error_found = true;
671 }
672 else if (ref_found)
673 {
674 error ("Alias has more than one alias reference");
675 error_found = true;
676 }
677 else
678 ref_found = true;
679 if (!ref_found)
680 {
681 error ("Analyzed alias has no reference");
682 error_found = true;
683 }
684 }
91bf9d9a 685 if (node->analyzed && node->thunk.thunk_p)
686 {
687 if (!node->callees)
688 {
689 error ("No edge out of thunk node");
690 error_found = true;
691 }
692 else if (node->callees->next_callee)
693 {
694 error ("More than one edge out of thunk node");
695 error_found = true;
696 }
697 if (gimple_has_body_p (node->decl))
698 {
699 error ("Thunk is not supposed to have body");
700 error_found = true;
701 }
702 }
703 else if (node->analyzed && gimple_has_body_p (node->decl)
704 && !TREE_ASM_WRITTEN (node->decl)
705 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
706 && !flag_wpa)
b0cdf642 707 {
e27482aa 708 if (this_cfun->cfg)
709 {
710 /* The nodes we're interested in are never shared, so walk
711 the tree ignoring duplicates. */
e7c352d1 712 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 713 /* Reach the trees by walking over the CFG, and note the
714 enclosing basic-blocks in the call edges. */
715 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 716 for (gsi = gsi_start_bb (this_block);
717 !gsi_end_p (gsi);
718 gsi_next (&gsi))
9bfec7c2 719 {
75a70cf9 720 gimple stmt = gsi_stmt (gsi);
799c8711 721 if (is_gimple_call (stmt))
9bfec7c2 722 {
723 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 724 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 725 if (e)
726 {
727 if (e->aux)
728 {
0a81f5a0 729 error ("shared call_stmt:");
7b29dd2f 730 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 731 error_found = true;
732 }
799c8711 733 if (!e->indirect_unknown_callee)
28454517 734 {
2f9d66d3 735 if (verify_edge_corresponds_to_fndecl (e, decl))
799c8711 736 {
737 error ("edge points to wrong declaration:");
738 debug_tree (e->callee->decl);
739 fprintf (stderr," Instead of:");
740 debug_tree (decl);
741 error_found = true;
742 }
28454517 743 }
799c8711 744 else if (decl)
9bfec7c2 745 {
799c8711 746 error ("an indirect edge with unknown callee "
747 "corresponding to a call_stmt with "
748 "a known declaration:");
ee3f5fc0 749 error_found = true;
7b29dd2f 750 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
9bfec7c2 751 }
752 e->aux = (void *)1;
753 }
799c8711 754 else if (decl)
9bfec7c2 755 {
0a81f5a0 756 error ("missing callgraph edge for call stmt:");
7b29dd2f 757 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 758 error_found = true;
759 }
760 }
761 }
e27482aa 762 pointer_set_destroy (visited_nodes);
e27482aa 763 }
764 else
765 /* No CFG available?! */
766 gcc_unreachable ();
767
b0cdf642 768 for (e = node->callees; e; e = e->next_callee)
769 {
799c8711 770 if (!e->aux)
b0cdf642 771 {
0a81f5a0 772 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 773 identifier_to_locale (cgraph_node_name (e->caller)),
774 identifier_to_locale (cgraph_node_name (e->callee)));
7b29dd2f 775 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
b0cdf642 776 error_found = true;
777 }
778 e->aux = 0;
779 }
799c8711 780 for (e = node->indirect_calls; e; e = e->next_callee)
781 {
782 if (!e->aux)
783 {
784 error ("an indirect edge from %s has no corresponding call_stmt",
785 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 786 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 787 error_found = true;
788 }
789 e->aux = 0;
790 }
b0cdf642 791 }
792 if (error_found)
793 {
794 dump_cgraph_node (stderr, node);
0a81f5a0 795 internal_error ("verify_cgraph_node failed");
b0cdf642 796 }
797 timevar_pop (TV_CGRAPH_VERIFY);
798}
799
800/* Verify whole cgraph structure. */
4b987fac 801DEBUG_FUNCTION void
b0cdf642 802verify_cgraph (void)
803{
804 struct cgraph_node *node;
805
852f689e 806 if (seen_error ())
8ec2a798 807 return;
808
b0cdf642 809 for (node = cgraph_nodes; node; node = node->next)
810 verify_cgraph_node (node);
811}
812
56af936e 813/* Output all asm statements we have stored up to be output. */
814
815static void
816cgraph_output_pending_asms (void)
817{
818 struct cgraph_asm_node *can;
819
852f689e 820 if (seen_error ())
56af936e 821 return;
822
823 for (can = cgraph_asm_nodes; can; can = can->next)
824 assemble_asm (can->asm_str);
825 cgraph_asm_nodes = NULL;
826}
827
0785e435 828/* Analyze the function scheduled to be output. */
222bc9b9 829void
0785e435 830cgraph_analyze_function (struct cgraph_node *node)
831{
bfec3452 832 tree save = current_function_decl;
0785e435 833 tree decl = node->decl;
834
c70f46b0 835 if (node->alias && node->thunk.alias)
836 {
837 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
838 if (!VEC_length (ipa_ref_t, node->ref_list.references))
839 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
840 if (node->same_body_alias)
841 {
842 DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
843 DECL_DECLARED_INLINE_P (node->decl)
844 = DECL_DECLARED_INLINE_P (node->thunk.alias);
845 DECL_DISREGARD_INLINE_LIMITS (node->decl)
846 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
847 }
848
      /* Fix up the visibility nonsense the C++ front end produces on
	 same-body aliases.  */
850 if (TREE_PUBLIC (node->decl) && node->same_body_alias)
851 {
852 DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
89bf5ca9 853 if (DECL_ONE_ONLY (node->thunk.alias))
c70f46b0 854 {
89bf5ca9 855 DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
c70f46b0 856 DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
857 if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
858 {
859 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
860 node->same_comdat_group = tgt;
861 if (!tgt->same_comdat_group)
862 tgt->same_comdat_group = node;
863 else
864 {
865 struct cgraph_node *n;
866 for (n = tgt->same_comdat_group;
867 n->same_comdat_group != tgt;
868 n = n->same_comdat_group)
869 ;
870 n->same_comdat_group = node;
871 }
872 }
873 }
874 }
875 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
876 if (node->address_taken)
877 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
878 if (cgraph_decide_is_function_needed (node, node->decl))
879 cgraph_mark_needed_node (node);
880 }
881 else if (node->thunk.thunk_p)
91bf9d9a 882 {
883 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
884 NULL, 0, CGRAPH_FREQ_BASE);
885 }
886 else
887 {
888 current_function_decl = decl;
889 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 890
91bf9d9a 891 assign_assembler_name_if_neeeded (node->decl);
6816d0c4 892
      /* Make sure to gimplify bodies only once.  While analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
897 if (!gimple_body (decl))
898 gimplify_function_tree (decl);
899 dump_function (TDI_generic, decl);
bfec3452 900
91bf9d9a 901 cgraph_lower_function (node);
902 pop_cfun ();
903 }
6e8d6e86 904 node->analyzed = true;
0785e435 905
bfec3452 906 current_function_decl = save;
0785e435 907}
908
/* The C++ front end produces same-body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their functions in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE
   is sure it will not stream PCH we build the links via this function.  */
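
/* An illustrative example: for

     struct S { S () {} };

   the C++ front end typically emits the complete and base constructor
   variants as two symbols sharing one body, one of them being a same-body
   alias of the other.  The IPA_REF_ALIAS reference created below is the
   link between such a pair of nodes.  */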
914
915void
916cgraph_process_same_body_aliases (void)
917{
918 struct cgraph_node *node;
919 for (node = cgraph_nodes; node; node = node->next)
920 if (node->same_body_alias
921 && !VEC_length (ipa_ref_t, node->ref_list.references))
922 {
923 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
924 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
925 }
926 same_body_aliases_done = true;
927}
928
/* Process attributes common to variables and functions.  */
930
931static void
932process_common_attributes (tree decl)
933{
934 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
935
936 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
937 {
938 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
939 "%<weakref%> attribute should be accompanied with"
940 " an %<alias%> attribute");
941 DECL_WEAK (decl) = 0;
40b32d93 942 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
943 DECL_ATTRIBUTES (decl));
d05db70d 944 }
945}
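
/* An illustrative example of the weakref handling above:

     static int foo (void) __attribute__ ((weakref));            // warned
     static int bar (void) __attribute__ ((weakref ("target")));  // OK

   A bare "weakref" names no target (and so carries no implicit "alias"
   attribute); it is dropped and DECL_WEAK is cleared.  The second form is
   equivalent to weakref plus alias ("target") and is left alone.  */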
946
05806473 947/* Look for externally_visible and used attributes and mark cgraph nodes
948 accordingly.
949
950 We cannot mark the nodes at the point the attributes are processed (in
951 handle_*_attribute) because the copy of the declarations available at that
952 point may not be canonical. For example, in:
953
954 void f();
955 void f() __attribute__((used));
956
957 the declaration we see in handle_used_attribute will be the second
958 declaration -- but the front end will subsequently merge that declaration
959 with the original declaration and discard the second declaration.
960
961 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
962
963 void f() {}
964 void f() __attribute__((externally_visible));
965
966 is valid.
967
968 So, we walk the nodes at the end of the translation unit, applying the
969 attributes at that point. */
970
971static void
972process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 973 struct varpool_node *first_var)
05806473 974{
975 struct cgraph_node *node;
1d416bd7 976 struct varpool_node *vnode;
05806473 977
978 for (node = cgraph_nodes; node != first; node = node->next)
979 {
980 tree decl = node->decl;
83a23b05 981 if (DECL_PRESERVE_P (decl))
0b49f8f8 982 cgraph_mark_needed_node (node);
62433d51 983 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
984 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
985 && TREE_PUBLIC (node->decl))
986 {
987 if (node->local.finalized)
988 cgraph_mark_needed_node (node);
989 }
990 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 991 {
ba12ea31 992 if (! TREE_PUBLIC (node->decl))
712d2297 993 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
994 "%<externally_visible%>"
995 " attribute have effect only on public objects");
59dd4830 996 else if (node->local.finalized)
997 cgraph_mark_needed_node (node);
05806473 998 }
40b32d93 999 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
c70f46b0 1000 && (node->local.finalized && !node->alias))
40b32d93 1001 {
1002 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1003 "%<weakref%> attribute ignored"
1004 " because function is defined");
1005 DECL_WEAK (decl) = 0;
1006 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1007 DECL_ATTRIBUTES (decl));
1008 }
a522e9eb 1009
1010 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
1011 && !DECL_DECLARED_INLINE_P (decl)
1012 /* redefining extern inline function makes it DECL_UNINLINABLE. */
1013 && !DECL_UNINLINABLE (decl))
1014 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1015 "always_inline function might not be inlinable");
1016
d05db70d 1017 process_common_attributes (decl);
05806473 1018 }
1d416bd7 1019 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 1020 {
1021 tree decl = vnode->decl;
83a23b05 1022 if (DECL_PRESERVE_P (decl))
05806473 1023 {
22671757 1024 vnode->force_output = true;
05806473 1025 if (vnode->finalized)
1d416bd7 1026 varpool_mark_needed_node (vnode);
05806473 1027 }
62433d51 1028 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1029 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 1030 && TREE_PUBLIC (vnode->decl))
62433d51 1031 {
1032 if (vnode->finalized)
1033 varpool_mark_needed_node (vnode);
1034 }
1035 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 1036 {
ba12ea31 1037 if (! TREE_PUBLIC (vnode->decl))
712d2297 1038 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1039 "%<externally_visible%>"
1040 " attribute have effect only on public objects");
59dd4830 1041 else if (vnode->finalized)
1042 varpool_mark_needed_node (vnode);
05806473 1043 }
40b32d93 1044 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1045 && vnode->finalized
1046 && DECL_INITIAL (decl))
1047 {
1048 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1049 "%<weakref%> attribute ignored"
1050 " because variable is initialized");
1051 DECL_WEAK (decl) = 0;
1052 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1053 DECL_ATTRIBUTES (decl));
1054 }
d05db70d 1055 process_common_attributes (decl);
05806473 1056 }
1057}
1058
/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes into
   the beginning of the queue.  Just the new part of the queue is re-scanned
   then.  */
ae01b312 1063
aeeb194b 1064static void
1065cgraph_analyze_functions (void)
ae01b312 1066{
c1dcd13c 1067 /* Keep track of already processed nodes when called multiple times for
06b27565 1068 intermodule optimization. */
c1dcd13c 1069 static struct cgraph_node *first_analyzed;
c17d0de1 1070 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1071 static struct varpool_node *first_analyzed_var;
aeeb194b 1072 struct cgraph_node *node, *next;
ae01b312 1073
f1c35659 1074 bitmap_obstack_initialize (NULL);
c17d0de1 1075 process_function_and_variable_attributes (first_processed,
1076 first_analyzed_var);
1077 first_processed = cgraph_nodes;
1d416bd7 1078 first_analyzed_var = varpool_nodes;
1079 varpool_analyze_pending_decls ();
f79b6507 1080 if (cgraph_dump_file)
ae01b312 1081 {
e4200070 1082 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1083 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1084 if (node->needed)
f79b6507 1085 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1086 fprintf (cgraph_dump_file, "\n");
ae01b312 1087 }
aeeb194b 1088 cgraph_process_new_functions ();
ae01b312 1089
e6d2b2d8 1090 /* Propagate reachability flag and lower representation of all reachable
1091 functions. In the future, lowering will introduce new functions and
1092 new entry points on the way (by template instantiation and virtual
1093 method table generation for instance). */
3d7bfc56 1094 while (cgraph_nodes_queue)
ae01b312 1095 {
0785e435 1096 struct cgraph_edge *edge;
3d7bfc56 1097 tree decl = cgraph_nodes_queue->decl;
1098
1099 node = cgraph_nodes_queue;
d87976fb 1100 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1101 node->next_needed = NULL;
ae01b312 1102
      /* ??? It is possible to create an extern inline function and later
	 use the weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
91bf9d9a 1106 if (!DECL_STRUCT_FUNCTION (decl)
c70f46b0 1107 && (!node->alias || !node->thunk.alias)
91bf9d9a 1108 && !node->thunk.thunk_p)
9b8fb23a 1109 {
1110 cgraph_reset_node (node);
443089c1 1111 node->local.redefined_extern_inline = true;
9b8fb23a 1112 continue;
1113 }
638531ad 1114
7bfefa9d 1115 if (!node->analyzed)
1116 cgraph_analyze_function (node);
2c0b522d 1117
ae01b312 1118 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1119 if (!edge->callee->reachable)
2c0b522d 1120 cgraph_mark_reachable_node (edge->callee);
91bf9d9a 1121 for (edge = node->callers; edge; edge = edge->next_caller)
1122 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1123 cgraph_mark_reachable_node (edge->caller);
2c0b522d 1124
61c2c7b1 1125 if (node->same_comdat_group)
1126 {
1127 for (next = node->same_comdat_group;
1128 next != node;
1129 next = next->same_comdat_group)
1130 cgraph_mark_reachable_node (next);
1131 }
1132
d544ceff 1133 /* If decl is a clone of an abstract function, mark that abstract
1134 function so that we don't release its body. The DECL_INITIAL() of that
fd6a3c41 1135 abstract function declaration will be later needed to output debug
1136 info. */
d544ceff 1137 if (DECL_ABSTRACT_ORIGIN (decl))
1138 {
fd6a3c41 1139 struct cgraph_node *origin_node;
1140 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
d544ceff 1141 origin_node->abstract_and_needed = true;
1142 }
1143
      /* We finalize local static variables while constructing callgraph
	 edges.  Process their attributes too.  */
1146 process_function_and_variable_attributes (first_processed,
1147 first_analyzed_var);
1148 first_processed = cgraph_nodes;
1d416bd7 1149 first_analyzed_var = varpool_nodes;
1150 varpool_analyze_pending_decls ();
aeeb194b 1151 cgraph_process_new_functions ();
ae01b312 1152 }
2c0b522d 1153
aa5e06c7 1154 /* Collect entry points to the unit. */
f79b6507 1155 if (cgraph_dump_file)
3d7bfc56 1156 {
e4200070 1157 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1158 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1159 if (node->needed)
f79b6507 1160 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1161 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1162 dump_cgraph (cgraph_dump_file);
7410370b 1163 dump_varpool (cgraph_dump_file);
3d7bfc56 1164 }
e6d2b2d8 1165
f79b6507 1166 if (cgraph_dump_file)
1167 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1168
f4ec5ce1 1169 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1170 {
1171 tree decl = node->decl;
f4ec5ce1 1172 next = node->next;
ae01b312 1173
91bf9d9a 1174 if (node->local.finalized && !gimple_has_body_p (decl)
c70f46b0 1175 && (!node->alias || !node->thunk.alias)
91bf9d9a 1176 && !node->thunk.thunk_p)
a0c938f0 1177 cgraph_reset_node (node);
9b8fb23a 1178
91bf9d9a 1179 if (!node->reachable
c70f46b0 1180 && (gimple_has_body_p (decl) || node->thunk.thunk_p
1181 || (node->alias && node->thunk.alias)))
ae01b312 1182 {
f79b6507 1183 if (cgraph_dump_file)
1184 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1185 cgraph_remove_node (node);
9b8fb23a 1186 continue;
ae01b312 1187 }
bc5cab3b 1188 else
1189 node->next_needed = NULL;
91bf9d9a 1190 gcc_assert (!node->local.finalized || node->thunk.thunk_p
c70f46b0 1191 || node->alias
91bf9d9a 1192 || gimple_has_body_p (decl));
9b8fb23a 1193 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1194 }
f79b6507 1195 if (cgraph_dump_file)
e4200070 1196 {
1197 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1198 dump_cgraph (cgraph_dump_file);
7410370b 1199 dump_varpool (cgraph_dump_file);
e4200070 1200 }
f1c35659 1201 bitmap_obstack_release (NULL);
c1dcd13c 1202 first_analyzed = cgraph_nodes;
ae01b312 1203 ggc_collect ();
aeeb194b 1204}
1205
/* Translate the ugly representation of aliases as alias pairs into the
   nice representation in the callgraph.  We don't handle all cases yet,
   unfortunately.  */
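
/* An illustrative example of the alias pairs handled here:

     int the_target (void) { return 0; }
     int wrapper (void) __attribute__ ((alias ("the_target")));

   The attribute is first recorded as a (decl, target name) entry in
   alias_pairs; the loop below resolves the target name to its cgraph or
   varpool node and turns the pair into a first-class alias via
   cgraph_create_function_alias or varpool_create_variable_alias.  */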
1209
1210static void
1211handle_alias_pairs (void)
1212{
1213 alias_pair *p;
1214 unsigned i;
1215 struct cgraph_node *target_node;
1216 struct cgraph_node *src_node;
e0eaac80 1217 struct varpool_node *target_vnode;
3a849bc1 1218
1219 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1220 {
1221 if (TREE_CODE (p->decl) == FUNCTION_DECL
1222 && !lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl))
1223 && (target_node = cgraph_node_for_asm (p->target)) != NULL)
1224 {
1225 src_node = cgraph_get_node (p->decl);
1226 if (src_node && src_node->local.finalized)
1227 cgraph_reset_node (src_node);
	  /* Normally the EXTERNAL flag is used to mark external inlines;
	     however for aliases it seems to be allowed to use it without
	     any meaning (see gcc.dg/attr-alias-3.c).
	     For weakref, however, we insist on the EXTERNAL flag being set
	     (see gcc.dg/attr-alias-5.c).  */
1233 if (DECL_EXTERNAL (p->decl))
1234 DECL_EXTERNAL (p->decl) = 0;
1235 cgraph_create_function_alias (p->decl, target_node->decl);
1236 VEC_unordered_remove (alias_pair, alias_pairs, i);
1237 }
e0eaac80 1238 else if (TREE_CODE (p->decl) == VAR_DECL
1239 && !lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl))
1240 && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
1241 {
	  /* Normally the EXTERNAL flag is used to mark external inlines;
	     however for aliases it seems to be allowed to use it without
	     any meaning (see gcc.dg/attr-alias-3.c).
	     For weakref, however, we insist on the EXTERNAL flag being set
	     (see gcc.dg/attr-alias-5.c).  */
1247 if (DECL_EXTERNAL (p->decl))
1248 DECL_EXTERNAL (p->decl) = 0;
1249 varpool_create_variable_alias (p->decl, target_vnode->decl);
1250 VEC_unordered_remove (alias_pair, alias_pairs, i);
1251 }
3a849bc1 1252 else
1253 {
1254 if (dump_file)
1255 fprintf (dump_file, "Unhandled alias %s->%s\n",
1256 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
1257 IDENTIFIER_POINTER (p->target));
1258
1259 i++;
1260 }
1261 }
1262}
1263
8f69fd82 1264
aeeb194b 1265/* Analyze the whole compilation unit once it is parsed completely. */
1266
1267void
1268cgraph_finalize_compilation_unit (void)
1269{
9929334e 1270 timevar_push (TV_CGRAPH);
1271
a0605d65 1272 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1273 if (flag_lto)
1274 lto_streamer_hooks_init ();
1275
bb903e9c 1276 /* If we're here there's no current function anymore. Some frontends
1277 are lazy in clearing these. */
1278 current_function_decl = NULL;
1279 set_cfun (NULL);
1280
  /* Do not skip analyzing the functions if there were errors; otherwise we
     would miss diagnostics for the following functions.  */
aeeb194b 1283
8f69fd82 1284 /* Emit size functions we didn't inline. */
4189e677 1285 finalize_size_functions ();
8f69fd82 1286
9929334e 1287 /* Mark alias targets necessary and emit diagnostics. */
1288 finish_aliases_1 ();
3a849bc1 1289 handle_alias_pairs ();
9929334e 1290
aeeb194b 1291 if (!quiet_flag)
1292 {
1293 fprintf (stderr, "\nAnalyzing compilation unit\n");
1294 fflush (stderr);
1295 }
1296
ec4791a8 1297 if (flag_dump_passes)
1298 dump_passes ();
1299
9929334e 1300 /* Gimplify and lower all functions, compute reachability and
1301 remove unreachable nodes. */
1302 cgraph_analyze_functions ();
1303
8f69fd82 1304 /* Mark alias targets necessary and emit diagnostics. */
1305 finish_aliases_1 ();
3a849bc1 1306 handle_alias_pairs ();
8f69fd82 1307
9929334e 1308 /* Gimplify and lower thunks. */
aeeb194b 1309 cgraph_analyze_functions ();
bfec3452 1310
9929334e 1311 /* Finally drive the pass manager. */
bfec3452 1312 cgraph_optimize ();
9929334e 1313
1314 timevar_pop (TV_CGRAPH);
ae01b312 1315}
9ed5b1f5 1316
1317
ae01b312 1318/* Figure out what functions we want to assemble. */
1319
1320static void
d9d9733a 1321cgraph_mark_functions_to_output (void)
ae01b312 1322{
1323 struct cgraph_node *node;
61c2c7b1 1324#ifdef ENABLE_CHECKING
1325 bool check_same_comdat_groups = false;
1326
1327 for (node = cgraph_nodes; node; node = node->next)
1328 gcc_assert (!node->process);
1329#endif
ae01b312 1330
ae01b312 1331 for (node = cgraph_nodes; node; node = node->next)
1332 {
1333 tree decl = node->decl;
d7c6d889 1334 struct cgraph_edge *e;
a0c938f0 1335
61c2c7b1 1336 gcc_assert (!node->process || node->same_comdat_group);
1337 if (node->process)
1338 continue;
d7c6d889 1339
1340 for (e = node->callers; e; e = e->next_caller)
611e5405 1341 if (e->inline_failed)
d7c6d889 1342 break;
ae01b312 1343
e6d2b2d8 1344 /* We need to output all local functions that are used and not
1345 always inlined, as well as those that are reachable from
1346 outside the current compilation unit. */
1a1a827a 1347 if (node->analyzed
91bf9d9a 1348 && !node->thunk.thunk_p
c70f46b0 1349 && !node->alias
b0cdf642 1350 && !node->global.inlined_to
1e3aebec 1351 && (!cgraph_only_called_directly_p (node)
c70f46b0 1352 || ((e || ipa_ref_has_aliases_p (&node->ref_list))
1353 && node->reachable))
4ee9c684 1354 && !TREE_ASM_WRITTEN (decl)
ae01b312 1355 && !DECL_EXTERNAL (decl))
61c2c7b1 1356 {
1357 node->process = 1;
1358 if (node->same_comdat_group)
1359 {
1360 struct cgraph_node *next;
1361 for (next = node->same_comdat_group;
1362 next != node;
1363 next = next->same_comdat_group)
c70f46b0 1364 if (!next->thunk.thunk_p && !next->alias)
91bf9d9a 1365 next->process = 1;
61c2c7b1 1366 }
1367 }
1368 else if (node->same_comdat_group)
1369 {
1370#ifdef ENABLE_CHECKING
1371 check_same_comdat_groups = true;
1372#endif
1373 }
cc636d56 1374 else
9cee7c3f 1375 {
1376 /* We should've reclaimed all functions that are not needed. */
1377#ifdef ENABLE_CHECKING
75a70cf9 1378 if (!node->global.inlined_to
1a1a827a 1379 && gimple_has_body_p (decl)
08843223 1380 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1381 are inside partition, we can end up not removing the body since we no longer
1382 have analyzed node pointing to it. */
1383 && !node->in_other_partition
c70f46b0 1384 && !node->alias
9cee7c3f 1385 && !DECL_EXTERNAL (decl))
1386 {
1387 dump_cgraph_node (stderr, node);
1388 internal_error ("failed to reclaim unneeded function");
1389 }
1390#endif
75a70cf9 1391 gcc_assert (node->global.inlined_to
1a1a827a 1392 || !gimple_has_body_p (decl)
08843223 1393 || node->in_other_partition
9cee7c3f 1394 || DECL_EXTERNAL (decl));
1395
1396 }
a0c938f0 1397
961e3b13 1398 }
61c2c7b1 1399#ifdef ENABLE_CHECKING
1400 if (check_same_comdat_groups)
1401 for (node = cgraph_nodes; node; node = node->next)
1402 if (node->same_comdat_group && !node->process)
1403 {
1404 tree decl = node->decl;
1405 if (!node->global.inlined_to
1406 && gimple_has_body_p (decl)
	 /* FIXME: in an ltrans unit, when the offline copy is outside the
	    partition but inline copies are inside it, we can end up not
	    removing the body since we no longer have an analyzed node
	    pointing to it.  */
1410 && !node->in_other_partition
61c2c7b1 1411 && !DECL_EXTERNAL (decl))
1412 {
1413 dump_cgraph_node (stderr, node);
	  internal_error ("failed to reclaim unneeded function in same comdat group");
61c2c7b1 1415 }
1416 }
1417#endif
961e3b13 1418}
1419
/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to a newly constructed empty function
   body.  Return the basic block in the function body.  */
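
/* Usage sketch (see assemble_thunk below for the real call site):

     bb = init_lowered_empty_function (thunk_fndecl);
     bsi = gsi_start_bb (bb);
     // ... emit lowered GIMPLE statements into BSI ...

   i.e. the caller receives a single empty basic block, already connected
   to the entry and exit blocks, ready to be filled with statements.  */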
1425
1426static basic_block
1427init_lowered_empty_function (tree decl)
1428{
1429 basic_block bb;
1430
1431 current_function_decl = decl;
1432 allocate_struct_function (decl, false);
1433 gimple_register_cfg_hooks ();
1434 init_empty_tree_cfg ();
1435 init_tree_ssa (cfun);
1436 init_ssa_operands ();
1437 cfun->gimple_df->in_ssa_p = true;
1438 DECL_INITIAL (decl) = make_node (BLOCK);
1439
1440 DECL_SAVED_TREE (decl) = error_mark_node;
1441 cfun->curr_properties |=
1442 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
657e3a56 1443 PROP_ssa | PROP_gimple_any);
28454517 1444
1445 /* Create BB for body of the function and connect it properly. */
1446 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1447 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1448 make_edge (bb, EXIT_BLOCK_PTR, 0);
1449
1450 return bb;
1451}
1452
1453/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1454 offset indicated by VIRTUAL_OFFSET, if that is
1455 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1456 zero for a result adjusting thunk. */
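
/* In rough pseudo-code (the GIMPLE built below uses explicit temporaries
   and loads), the adjustment is:

     if (this_adjusting)
       ptr += fixed_offset;
     if (virtual_offset)
       {
         vtable = *ptr;                      // the vptr is at offset zero
         ptr += *(vtable + virtual_offset);  // the vcall offset
       }
     if (!this_adjusting)
       ptr += fixed_offset;

   Only one of the two fixed-offset additions applies, depending on
   THIS_ADJUSTING.  */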
1457
1458static tree
1459thunk_adjust (gimple_stmt_iterator * bsi,
1460 tree ptr, bool this_adjusting,
1461 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1462{
1463 gimple stmt;
1464 tree ret;
1465
55d6cb23 1466 if (this_adjusting
1467 && fixed_offset != 0)
28454517 1468 {
2cc66f2a 1469 stmt = gimple_build_assign
1470 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1471 ptr,
1472 fixed_offset));
28454517 1473 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1474 }
1475
1476 /* If there's a virtual offset, look up that value in the vtable and
1477 adjust the pointer again. */
1478 if (virtual_offset)
1479 {
1480 tree vtabletmp;
1481 tree vtabletmp2;
1482 tree vtabletmp3;
28454517 1483
1484 if (!vtable_entry_type)
1485 {
1486 tree vfunc_type = make_node (FUNCTION_TYPE);
1487 TREE_TYPE (vfunc_type) = integer_type_node;
1488 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1489 layout_type (vfunc_type);
1490
1491 vtable_entry_type = build_pointer_type (vfunc_type);
1492 }
1493
1494 vtabletmp =
1495 create_tmp_var (build_pointer_type
1496 (build_pointer_type (vtable_entry_type)), "vptr");
1497
1498 /* The vptr is always at offset zero in the object. */
1499 stmt = gimple_build_assign (vtabletmp,
1500 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1501 ptr));
1502 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1503 mark_symbols_for_renaming (stmt);
1504 find_referenced_vars_in (stmt);
1505
1506 /* Form the vtable address. */
1507 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1508 "vtableaddr");
1509 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1510 build_simple_mem_ref (vtabletmp));
28454517 1511 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1512 mark_symbols_for_renaming (stmt);
1513 find_referenced_vars_in (stmt);
1514
1515 /* Find the entry with the vcall offset. */
1516 stmt = gimple_build_assign (vtabletmp2,
2cc66f2a 1517 fold_build_pointer_plus_loc (input_location,
1518 vtabletmp2,
1519 virtual_offset));
28454517 1520 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1521
1522 /* Get the offset itself. */
1523 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1524 "vcalloffset");
1525 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1526 build_simple_mem_ref (vtabletmp2));
28454517 1527 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1528 mark_symbols_for_renaming (stmt);
1529 find_referenced_vars_in (stmt);
1530
28454517 1531 /* Adjust the `this' pointer. */
a0553bff 1532 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1533 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1534 GSI_CONTINUE_LINKING);
28454517 1535 }
1536
55d6cb23 1537 if (!this_adjusting
1538 && fixed_offset != 0)
28454517 1539 /* Adjust the pointer by the constant. */
1540 {
1541 tree ptrtmp;
1542
1543 if (TREE_CODE (ptr) == VAR_DECL)
1544 ptrtmp = ptr;
1545 else
1546 {
1547 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1548 stmt = gimple_build_assign (ptrtmp, ptr);
1549 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1550 mark_symbols_for_renaming (stmt);
1551 find_referenced_vars_in (stmt);
1552 }
2cc66f2a 1553 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1554 ptrtmp, fixed_offset);
28454517 1555 }
1556
1557 /* Emit the statement and gimplify the adjustment expression. */
1558 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1559 stmt = gimple_build_assign (ret, ptr);
1560 mark_symbols_for_renaming (stmt);
1561 find_referenced_vars_in (stmt);
1562 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1563
1564 return ret;
1565}
1566
1567/* Produce assembler for thunk NODE. */
1568
1569static void
1570assemble_thunk (struct cgraph_node *node)
1571{
1572 bool this_adjusting = node->thunk.this_adjusting;
1573 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1574 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1575 tree virtual_offset = NULL;
1576 tree alias = node->thunk.alias;
1577 tree thunk_fndecl = node->decl;
1578 tree a = DECL_ARGUMENTS (thunk_fndecl);
1579
1580 current_function_decl = thunk_fndecl;
1581
aed6e608 1582 /* Ensure thunks are emitted in their correct sections. */
1583 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1584
28454517 1585 if (this_adjusting
1586 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1587 virtual_value, alias))
1588 {
1589 const char *fnname;
1590 tree fn_block;
28b2c6a7 1591 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
28454517 1592
1593 DECL_RESULT (thunk_fndecl)
1594 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
28b2c6a7 1595 RESULT_DECL, 0, restype);
22ea3b47 1596 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1597
1598 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1599 create one. */
1600 fn_block = make_node (BLOCK);
1601 BLOCK_VARS (fn_block) = a;
1602 DECL_INITIAL (thunk_fndecl) = fn_block;
1603 init_function_start (thunk_fndecl);
1604 cfun->is_thunk = 1;
1605 assemble_start_function (thunk_fndecl, fnname);
1606
1607 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1608 fixed_offset, virtual_value, alias);
1609
1610 assemble_end_function (thunk_fndecl, fnname);
1611 init_insn_lengths ();
1612 free_after_compilation (cfun);
1613 set_cfun (NULL);
1614 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
91bf9d9a 1615 node->thunk.thunk_p = false;
1616 node->analyzed = false;
28454517 1617 }
1618 else
1619 {
1620 tree restype;
1621 basic_block bb, then_bb, else_bb, return_bb;
1622 gimple_stmt_iterator bsi;
1623 int nargs = 0;
1624 tree arg;
1625 int i;
1626 tree resdecl;
1627 tree restmp = NULL;
1628 VEC(tree, heap) *vargs;
1629
1630 gimple call;
1631 gimple ret;
1632
1633 DECL_IGNORED_P (thunk_fndecl) = 1;
1634 bitmap_obstack_initialize (NULL);
1635
1636 if (node->thunk.virtual_offset_p)
1637 virtual_offset = size_int (virtual_value);
1638
1639 /* Build the return declaration for the function. */
1640 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1641 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1642 {
1643 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1644 DECL_ARTIFICIAL (resdecl) = 1;
1645 DECL_IGNORED_P (resdecl) = 1;
1646 DECL_RESULT (thunk_fndecl) = resdecl;
1647 }
1648 else
1649 resdecl = DECL_RESULT (thunk_fndecl);
1650
1651 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1652
1653 bsi = gsi_start_bb (bb);
1654
1655 /* Build call to the function being thunked. */
1656 if (!VOID_TYPE_P (restype))
1657 {
1658 if (!is_gimple_reg_type (restype))
1659 {
1660 restmp = resdecl;
2ab2ce89 1661 add_local_decl (cfun, restmp);
28454517 1662 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1663 }
1664 else
1665 restmp = create_tmp_var_raw (restype, "retval");
1666 }
1667
1767a056 1668 for (arg = a; arg; arg = DECL_CHAIN (arg))
28454517 1669 nargs++;
1670 vargs = VEC_alloc (tree, heap, nargs);
1671 if (this_adjusting)
1672 VEC_quick_push (tree, vargs,
1673 thunk_adjust (&bsi,
1674 a, 1, fixed_offset,
1675 virtual_offset));
1676 else
1677 VEC_quick_push (tree, vargs, a);
1767a056 1678 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
28454517 1679 VEC_quick_push (tree, vargs, arg);
1680 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1681 VEC_free (tree, heap, vargs);
1682 gimple_call_set_cannot_inline (call, true);
1683 gimple_call_set_from_thunk (call, true);
1684 if (restmp)
1685 gimple_call_set_lhs (call, restmp);
1686 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1687 mark_symbols_for_renaming (call);
1688 find_referenced_vars_in (call);
1689 update_stmt (call);
1690
1691 if (restmp && !this_adjusting)
1692 {
57ab8ec3 1693 tree true_label = NULL_TREE;
28454517 1694
1695 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1696 {
1697 gimple stmt;
1698 /* If the return type is a pointer, we need to
1699 protect against NULL. We know there will be an
1700 adjustment, because that's why we're emitting a
1701 thunk. */
1702 then_bb = create_basic_block (NULL, (void *) 0, bb);
1703 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1704 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1705 remove_edge (single_succ_edge (bb));
1706 true_label = gimple_block_label (then_bb);
28454517 1707 stmt = gimple_build_cond (NE_EXPR, restmp,
385f3f36 1708 build_zero_cst (TREE_TYPE (restmp)),
28454517 1709 NULL_TREE, NULL_TREE);
1710 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1711 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1712 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1713 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1714 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1715 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1716 bsi = gsi_last_bb (then_bb);
1717 }
1718
1719 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1720 fixed_offset, virtual_offset);
1721 if (true_label)
1722 {
1723 gimple stmt;
1724 bsi = gsi_last_bb (else_bb);
385f3f36 1725 stmt = gimple_build_assign (restmp,
1726 build_zero_cst (TREE_TYPE (restmp)));
28454517 1727 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1728 bsi = gsi_last_bb (return_bb);
1729 }
1730 }
1731 else
1732 gimple_call_set_tail (call, true);
1733
1734 /* Build return value. */
1735 ret = gimple_build_return (restmp);
1736 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1737
1738 delete_unreachable_blocks ();
1739 update_ssa (TODO_update_ssa);
1740
28454517 1741 /* Since we want to emit the thunk, we explicitly mark its name as
1742 referenced. */
91bf9d9a 1743 node->thunk.thunk_p = false;
1744 cgraph_node_remove_callees (node);
28454517 1745 cgraph_add_new_function (thunk_fndecl, true);
1746 bitmap_obstack_release (NULL);
1747 }
1748 current_function_decl = NULL;
1749}
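
/* Editorial illustration, not part of cgraphunit.c: a hand-written C analogue
   of the generic (non-output_mi_thunk) path above for a return-adjusting,
   covariant-return thunk.  The types and names are hypothetical.  */
#include <stddef.h>
#include <stdio.h>

struct base { int b; };
struct derived { int pad; struct base base; };

/* Stand-in for the aliased function the thunk forwards to.  */
static struct derived the_object = { 0, { 42 } };

static struct derived *
target_fn (void *this_ptr)
{
  (void) this_ptr;
  return &the_object;
}

/* Call the target, then adjust the returned pointer so the caller sees a
   `struct base *'.  A NULL result must stay NULL, which is why the GIMPLE
   above builds the then/else/return blocks around the adjustment.  */
static struct base *
sketch_covariant_thunk (void *this_ptr)
{
  struct derived *r = target_fn (this_ptr);
  return r != NULL ? &r->base : NULL;
}

int
main (void)
{
  printf ("%d\n", sketch_covariant_thunk (NULL)->b);  /* prints 42 */
  return 0;
}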
1750
91bf9d9a 1751
c70f46b0 1752
1753/* Assemble thunks and aliases associated with NODE. */
91bf9d9a 1754
1755static void
c70f46b0 1756assemble_thunks_and_aliases (struct cgraph_node *node)
91bf9d9a 1757{
1758 struct cgraph_edge *e;
c70f46b0 1759 int i;
1760 struct ipa_ref *ref;
1761
91bf9d9a 1762 for (e = node->callers; e;)
1763 if (e->caller->thunk.thunk_p)
1764 {
1765 struct cgraph_node *thunk = e->caller;
1766
1767 e = e->next_caller;
c70f46b0 1768 assemble_thunks_and_aliases (thunk);
91bf9d9a 1769 assemble_thunk (thunk);
1770 }
1771 else
1772 e = e->next_caller;
c70f46b0 1773 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1774 if (ref->use == IPA_REF_ALIAS)
1775 {
1776 struct cgraph_node *alias = ipa_ref_refering_node (ref);
968b8c52 1777 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1778
1779 /* Force assemble_alias to really output the alias this time instead
1780 of buffering it among the pending alias pairs. */
1781 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
c70f46b0 1782 assemble_alias (alias->decl,
1783 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1784 assemble_thunks_and_aliases (alias);
968b8c52 1785 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
c70f46b0 1786 }
91bf9d9a 1787}
1788
ae01b312 1789/* Expand function specified by NODE. */
e6d2b2d8 1790
ae01b312 1791static void
d9d9733a 1792cgraph_expand_function (struct cgraph_node *node)
ae01b312 1793{
1794 tree decl = node->decl;
1795
b0cdf642 1796 /* We ought to not compile any inline clones. */
cc636d56 1797 gcc_assert (!node->global.inlined_to);
b0cdf642 1798
6329636b 1799 announce_function (decl);
09fc9532 1800 node->process = 0;
f7777314 1801 gcc_assert (node->lowered);
1802
1803 /* Generate RTL for the body of DECL. */
1804 tree_rest_of_compilation (decl);
1805
1806 /* Make sure that BE didn't give up on compiling. */
1807 gcc_assert (TREE_ASM_WRITTEN (decl));
1808 current_function_decl = NULL;
cc91b414 1809 gcc_assert (!cgraph_preserve_function_body_p (node));
f76f7453 1810
1811 /* It would make a lot more sense to output thunks before the function body to get more
1812 forward and fewer backward jumps. This, however, would require solving a problem
1813 with comdats. See PR48668. Also, aliases must come after the function itself to
1814 keep one-pass assemblers, like the one on AIX, happy. See PR 50689.
1815 FIXME: Perhaps thunks should be moved before the function IFF they are not in comdat
1816 groups. */
1817 assemble_thunks_and_aliases (node);
1a1a827a 1818 cgraph_release_function_body (node);
1819 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1820 points to the dead function body. */
1821 cgraph_node_remove_callees (node);
e1be32b8 1822
1823 cgraph_function_flags_ready = true;
ae01b312 1824}
1825
b0cdf642 1826/* Return true when CALLEE_DECL should be inlined into CALLER_DECL. */
d7c6d889 1827
1828bool
326a9581 1829cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1830{
b0cdf642 1831 *reason = e->inline_failed;
1832 return !e->inline_failed;
d7c6d889 1833}
b0cdf642 1834
acc70efa 1835
acc70efa 1836
d9d9733a 1837/* Expand all functions that must be output.
1838
d7c6d889 1839 Attempt to topologically sort the nodes so that a function is output when
1840 all the functions it calls are already assembled, allowing data to be
91c82c20 1841 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1842 between a function and its callees (later we may choose to use a more
d7c6d889 1843 sophisticated algorithm for function reordering; we will likely want
1844 to use subsections to make the output functions appear in top-down
1845 order). */
1846
1847static void
a6868229 1848cgraph_expand_all_functions (void)
d7c6d889 1849{
1850 struct cgraph_node *node;
4c36ffe6 1851 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1852 int order_pos, new_order_pos = 0;
d7c6d889 1853 int i;
1854
7771d558 1855 order_pos = ipa_reverse_postorder (order);
cc636d56 1856 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1857
7bd28bba 1858 /* The garbage collector may remove inline clones that we eliminate during
b0cdf642 1859 optimization, so we must be sure not to reference them. */
1860 for (i = 0; i < order_pos; i++)
09fc9532 1861 if (order[i]->process)
b0cdf642 1862 order[new_order_pos++] = order[i];
1863
1864 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1865 {
1866 node = order[i];
09fc9532 1867 if (node->process)
d7c6d889 1868 {
cc636d56 1869 gcc_assert (node->reachable);
09fc9532 1870 node->process = 0;
d7c6d889 1871 cgraph_expand_function (node);
1872 }
1873 }
523c1122 1874 cgraph_process_new_functions ();
773c5ba7 1875
d7c6d889 1876 free (order);
773c5ba7 1877
d7c6d889 1878}
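
/* Editorial illustration, not part of cgraphunit.c: a toy version of the
   ordering property described above -- a function is emitted only after
   everything it calls has been emitted.  The call graph and names here are
   made up.  */
#include <stdio.h>

#define N 4

/* calls[i][j] != 0 means function i calls function j.  */
static const int calls[N][N] = {
  { 0, 1, 1, 0 },   /* main -> foo, bar */
  { 0, 0, 0, 1 },   /* foo  -> baz      */
  { 0, 0, 0, 1 },   /* bar  -> baz      */
  { 0, 0, 0, 0 },   /* baz              */
};
static const char *const names[N] = { "main", "foo", "bar", "baz" };
static int visited[N];

/* Postorder DFS: print a function only after all of its callees.  */
static void
expand (int f)
{
  int i;

  if (visited[f])
    return;
  visited[f] = 1;
  for (i = 0; i < N; i++)
    if (calls[f][i])
      expand (i);
  printf ("expanding %s\n", names[f]);
}

int
main (void)
{
  int i;

  for (i = 0; i < N; i++)
    expand (i);   /* prints baz, foo, bar, main */
  return 0;
}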
1879
56af936e 1880/* This is used to sort the node types by the cgraph order number. */
1881
0b09525f 1882enum cgraph_order_sort_kind
1883{
1884 ORDER_UNDEFINED = 0,
1885 ORDER_FUNCTION,
1886 ORDER_VAR,
1887 ORDER_ASM
1888};
1889
56af936e 1890struct cgraph_order_sort
1891{
0b09525f 1892 enum cgraph_order_sort_kind kind;
56af936e 1893 union
1894 {
1895 struct cgraph_node *f;
1d416bd7 1896 struct varpool_node *v;
56af936e 1897 struct cgraph_asm_node *a;
1898 } u;
1899};
1900
1901/* Output all functions, variables, and asm statements in the order
1902 given by their order fields, which is the order in which they
1903 appeared in the file. This implements -fno-toplevel-reorder. In
1904 this mode we may output functions and variables which don't really
1905 need to be output. */
1906
1907static void
1908cgraph_output_in_order (void)
1909{
1910 int max;
56af936e 1911 struct cgraph_order_sort *nodes;
1912 int i;
1913 struct cgraph_node *pf;
1d416bd7 1914 struct varpool_node *pv;
56af936e 1915 struct cgraph_asm_node *pa;
1916
1917 max = cgraph_order;
3e1cde87 1918 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1919
1d416bd7 1920 varpool_analyze_pending_decls ();
56af936e 1921
1922 for (pf = cgraph_nodes; pf; pf = pf->next)
1923 {
c70f46b0 1924 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
56af936e 1925 {
1926 i = pf->order;
1927 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1928 nodes[i].kind = ORDER_FUNCTION;
1929 nodes[i].u.f = pf;
1930 }
1931 }
1932
1d416bd7 1933 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1934 {
1935 i = pv->order;
1936 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1937 nodes[i].kind = ORDER_VAR;
1938 nodes[i].u.v = pv;
1939 }
1940
1941 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1942 {
1943 i = pa->order;
1944 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1945 nodes[i].kind = ORDER_ASM;
1946 nodes[i].u.a = pa;
1947 }
56af936e 1948
304e5318 1949 /* Since we are not reordering top level statements, we output all statics; mark them all as needed. */
1950 for (i = 0; i < max; ++i)
1951 {
1952 if (nodes[i].kind == ORDER_VAR)
1953 {
1954 varpool_mark_needed_node (nodes[i].u.v);
1955 }
1956 }
1957 varpool_empty_needed_queue ();
1958
91da0f1c 1959 for (i = 0; i < max; ++i)
1960 if (nodes[i].kind == ORDER_VAR)
1961 varpool_finalize_named_section_flags (nodes[i].u.v);
1962
56af936e 1963 for (i = 0; i < max; ++i)
1964 {
1965 switch (nodes[i].kind)
1966 {
1967 case ORDER_FUNCTION:
09fc9532 1968 nodes[i].u.f->process = 0;
56af936e 1969 cgraph_expand_function (nodes[i].u.f);
1970 break;
1971
1972 case ORDER_VAR:
1d416bd7 1973 varpool_assemble_decl (nodes[i].u.v);
56af936e 1974 break;
1975
1976 case ORDER_ASM:
1977 assemble_asm (nodes[i].u.a->asm_str);
1978 break;
1979
1980 case ORDER_UNDEFINED:
1981 break;
1982
1983 default:
1984 gcc_unreachable ();
1985 }
1986 }
4b4ea2db 1987
1988 cgraph_asm_nodes = NULL;
3e1cde87 1989 free (nodes);
56af936e 1990}
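
/* Editorial illustration, not part of cgraphunit.c: the bucketing idea used
   above -- every top-level entity remembers the value of a global counter at
   creation time, and walking a table indexed by that counter reproduces the
   original declaration order.  All names are hypothetical.  */
#include <stdio.h>

enum kind { K_UNDEFINED = 0, K_FUNCTION, K_VAR, K_ASM };

struct slot { enum kind kind; const char *name; };

int
main (void)
{
  /* Entities in the (arbitrary) order the queues hand them over, each
     carrying the "order" number it was assigned when first seen.  */
  struct { enum kind kind; int order; const char *name; } seen[] = {
    { K_VAR,      1, "global_x"      },
    { K_ASM,      0, "asm (\"nop\")" },
    { K_FUNCTION, 2, "main"          },
  };
  struct slot slots[3] = { { K_UNDEFINED, NULL } };
  int i;

  for (i = 0; i < 3; i++)
    {
      slots[seen[i].order].kind = seen[i].kind;
      slots[seen[i].order].name = seen[i].name;
    }

  /* Walking the table by index recovers the source order: asm, var, fn.  */
  for (i = 0; i < 3; i++)
    if (slots[i].kind != K_UNDEFINED)
      printf ("%d: %s\n", i, slots[i].name);
  return 0;
}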
1991
b0cdf642 1992/* Return true when the function body of NODE still needs to be kept around
1993 for later re-use. */
1994bool
cc91b414 1995cgraph_preserve_function_body_p (struct cgraph_node *node)
b0cdf642 1996{
8d8c4c8d 1997 gcc_assert (cgraph_global_info_ready);
c70f46b0 1998 gcc_assert (!node->alias && !node->thunk.thunk_p);
cc91b414 1999
b0cdf642 2000 /* Look if there is any clone around. */
ccf4ab6b 2001 if (node->clones)
2002 return true;
b0cdf642 2003 return false;
2004}
2005
77fce4cd 2006static void
2007ipa_passes (void)
2008{
87d4aa85 2009 set_cfun (NULL);
4b14adf9 2010 current_function_decl = NULL;
75a70cf9 2011 gimple_register_cfg_hooks ();
77fce4cd 2012 bitmap_obstack_initialize (NULL);
59dd4830 2013
c9036234 2014 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2015
59dd4830 2016 if (!in_lto_p)
7b2e8956 2017 {
2018 execute_ipa_pass_list (all_small_ipa_passes);
2019 if (seen_error ())
2020 return;
2021 }
9ed5b1f5 2022
941125aa 2023 /* We never run removal of unreachable nodes after early passes. This is
2024 because TODO is run before the subpasses. It is important to remove
2025 the unreachable functions to save work at the IPA level and to get LTO
2026 symbol tables right. */
2027 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
2028
7bfefa9d 2029 /* If pass_all_early_optimizations was not scheduled, the state of
2030 the cgraph will not be properly updated. Update it now. */
2031 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2032 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 2033
7bfefa9d 2034 if (!in_lto_p)
2035 {
2036 /* Generate coverage variables and constructors. */
2037 coverage_finish ();
2038
2039 /* Process new functions added. */
2040 set_cfun (NULL);
2041 current_function_decl = NULL;
2042 cgraph_process_new_functions ();
7bfefa9d 2043
c9036234 2044 execute_ipa_summary_passes
2045 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 2046 }
23433d72 2047
2048 /* Some targets need to handle LTO assembler output specially. */
2049 if (flag_generate_lto)
2050 targetm.asm_out.lto_start ();
2051
7bfefa9d 2052 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
2053
2054 if (!in_lto_p)
2055 ipa_write_summaries ();
2056
23433d72 2057 if (flag_generate_lto)
2058 targetm.asm_out.lto_end ();
2059
b33542ab 2060 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
8867b500 2061 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 2062 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 2063
77fce4cd 2064 bitmap_obstack_release (NULL);
2065}
2066
34e5cced 2067
ae01b312 2068/* Perform simple optimizations based on callgraph. */
2069
7bfefa9d 2070void
d9d9733a 2071cgraph_optimize (void)
ae01b312 2072{
852f689e 2073 if (seen_error ())
cb2b5570 2074 return;
2075
b0cdf642 2076#ifdef ENABLE_CHECKING
2077 verify_cgraph ();
2078#endif
a861fe52 2079
c1dcd13c 2080 /* Frontend may output common variables after the unit has been finalized.
2081 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 2082 varpool_analyze_pending_decls ();
e9f08e82 2083
f79b6507 2084 timevar_push (TV_CGRAPHOPT);
51949610 2085 if (pre_ipa_mem_report)
2086 {
2087 fprintf (stderr, "Memory consumption before IPA\n");
2088 dump_memory_report (false);
2089 }
d7c6d889 2090 if (!quiet_flag)
cd6bca02 2091 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 2092 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 2093
be4d0974 2094 /* Don't run the IPA passes if there were any errors or sorry messages. */
852f689e 2095 if (!seen_error ())
be4d0974 2096 ipa_passes ();
2097
b33542ab 2098 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2099 if (seen_error ()
2100 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
021c1c18 2101 {
2102 timevar_pop (TV_CGRAPHOPT);
2103 return;
2104 }
34e5cced 2105
e1be32b8 2106 /* This pass removes bodies of extern inline functions we never inlined.
2107 Do this later so other IPA passes see what is really going on. */
2108 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 2109 cgraph_global_info_ready = true;
f79b6507 2110 if (cgraph_dump_file)
2111 {
e4200070 2112 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 2113 dump_cgraph (cgraph_dump_file);
c1dcd13c 2114 dump_varpool (cgraph_dump_file);
f79b6507 2115 }
51949610 2116 if (post_ipa_mem_report)
2117 {
defa2fa6 2118 fprintf (stderr, "Memory consumption after IPA\n");
51949610 2119 dump_memory_report (false);
2120 }
f79b6507 2121 timevar_pop (TV_CGRAPHOPT);
ae01b312 2122
d7c6d889 2123 /* Output everything. */
47306a5d 2124 (*debug_hooks->assembly_start) ();
e4200070 2125 if (!quiet_flag)
2126 fprintf (stderr, "Assembling functions:\n");
b0cdf642 2127#ifdef ENABLE_CHECKING
2128 verify_cgraph ();
2129#endif
56af936e 2130
ccf4ab6b 2131 cgraph_materialize_all_clones ();
657e3a56 2132 bitmap_obstack_initialize (NULL);
2133 execute_ipa_pass_list (all_late_ipa_passes);
2134 cgraph_remove_unreachable_nodes (true, dump_file);
2135#ifdef ENABLE_CHECKING
2136 verify_cgraph ();
2137#endif
2138 bitmap_obstack_release (NULL);
acc70efa 2139 cgraph_mark_functions_to_output ();
c1dcd13c 2140
523c1122 2141 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 2142 if (!flag_toplevel_reorder)
2143 cgraph_output_in_order ();
2144 else
2145 {
2146 cgraph_output_pending_asms ();
2147
2148 cgraph_expand_all_functions ();
1d416bd7 2149 varpool_remove_unreferenced_decls ();
56af936e 2150
1d416bd7 2151 varpool_assemble_pending_decls ();
56af936e 2152 }
523c1122 2153 cgraph_process_new_functions ();
2154 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 2155
f79b6507 2156 if (cgraph_dump_file)
2157 {
e4200070 2158 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 2159 dump_cgraph (cgraph_dump_file);
7410370b 2160 dump_varpool (cgraph_dump_file);
f79b6507 2161 }
b0cdf642 2162#ifdef ENABLE_CHECKING
2163 verify_cgraph ();
4ee9c684 2164 /* Double check that all inline clones are gone and that all
2165 function bodies have been released from memory. */
852f689e 2166 if (!seen_error ())
4ee9c684 2167 {
2168 struct cgraph_node *node;
2169 bool error_found = false;
2170
2171 for (node = cgraph_nodes; node; node = node->next)
2172 if (node->analyzed
2173 && (node->global.inlined_to
1a1a827a 2174 || gimple_has_body_p (node->decl)))
4ee9c684 2175 {
2176 error_found = true;
2177 dump_cgraph_node (stderr, node);
a0c938f0 2178 }
4ee9c684 2179 if (error_found)
c04e3894 2180 internal_error ("nodes with unreleased memory found");
4ee9c684 2181 }
b0cdf642 2182#endif
ae01b312 2183}
34e5cced 2184
121f3051 2185void
2186init_cgraph (void)
2187{
01ec0a6c 2188 if (!cgraph_dump_file)
2189 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 2190}
b5d36404 2191
a0c938f0 2192/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2193 fixed by cgraph_function_versioning (); now the call statements in the
2194 respective caller bodies should be updated to call the NEW_VERSION. */
2195
2196static void
2197update_call_expr (struct cgraph_node *new_version)
2198{
2199 struct cgraph_edge *e;
2200
2201 gcc_assert (new_version);
75a70cf9 2202
2203 /* Update the call expr on the edges to call the new version. */
b5d36404 2204 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2205 {
2206 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2207 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2208 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2209 }
b5d36404 2210}
2211
2212
2213/* Create a new cgraph node which is the new version of
2214 OLD_VERSION node. REDIRECT_CALLERS holds the caller
2215 edges which should be redirected to point to
2216 NEW_VERSION. All the callee edges of OLD_VERSION
2217 are cloned to the new version node. Return the new
b06ab5fa 2218 version node.
2219
2220 If non-NULL, BBS_TO_COPY determines which basic blocks
2221 are copied, to prevent duplication of calls that are dead
2222 in the clone. */
b5d36404 2223
2224static struct cgraph_node *
2225cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2226 tree new_decl,
b06ab5fa 2227 VEC(cgraph_edge_p,heap) *redirect_callers,
2228 bitmap bbs_to_copy)
2229 {
b5d36404 2230 struct cgraph_node *new_version;
32936803 2231 struct cgraph_edge *e;
b5d36404 2232 unsigned i;
2233
2234 gcc_assert (old_version);
a0c938f0 2235
5a90471f 2236 new_version = cgraph_create_node (new_decl);
b5d36404 2237
2238 new_version->analyzed = true;
2239 new_version->local = old_version->local;
a70a5e2c 2240 new_version->local.externally_visible = false;
2241 new_version->local.local = true;
b5d36404 2242 new_version->global = old_version->global;
a93f1c3b 2243 new_version->rtl = old_version->rtl;
b5d36404 2244 new_version->reachable = true;
2245 new_version->count = old_version->count;
2246
a70a5e2c 2247 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2248 if (!bbs_to_copy
2249 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2250 cgraph_clone_edge (e, new_version, e->call_stmt,
2251 e->lto_stmt_uid, REG_BR_PROB_BASE,
2252 CGRAPH_FREQ_BASE,
0835ad03 2253 true);
a70a5e2c 2254 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2255 if (!bbs_to_copy
2256 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2257 cgraph_clone_edge (e, new_version, e->call_stmt,
2258 e->lto_stmt_uid, REG_BR_PROB_BASE,
2259 CGRAPH_FREQ_BASE,
0835ad03 2260 true);
48148244 2261 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
4460a647 2262 {
2263 /* Redirect calls to the old version node to point to its new
2264 version. */
2265 cgraph_redirect_edge_callee (e, new_version);
2266 }
b5d36404 2267
2268 return new_version;
2269 }
2270
2271 /* Perform function versioning.
a0c938f0 2272 Function versioning includes copying of the tree and
b5d36404 2273 a callgraph update (creating a new cgraph node and updating
2274 its callees and callers).
2275
2276 REDIRECT_CALLERS varray includes the edges to be redirected
2277 to the new version.
2278
2279 TREE_MAP is a mapping of tree nodes we want to replace with
2280 new ones (according to results of prior analysis).
2281 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2282 It returns the new version's cgraph node.
b06ab5fa 2283 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2284 from the new version.
2285 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2286 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone. */
b5d36404 2287
2288struct cgraph_node *
2289cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2290 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2291 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2292 bitmap args_to_skip,
b06ab5fa 2293 bitmap bbs_to_copy,
2294 basic_block new_entry_block,
a70a5e2c 2295 const char *clone_name)
b5d36404 2296{
2297 tree old_decl = old_version_node->decl;
2298 struct cgraph_node *new_version_node = NULL;
2299 tree new_decl;
2300
2301 if (!tree_versionable_function_p (old_decl))
2302 return NULL;
2303
3c97c75d 2304 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2305
b5d36404 2306 /* Make a new FUNCTION_DECL tree node for the
2307 new version. */
5afe38fe 2308 if (!args_to_skip)
2309 new_decl = copy_node (old_decl);
2310 else
2311 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2312
df0b8dfb 2313 /* Generate a new name for the new version. */
2314 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2315 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2316 SET_DECL_RTL (new_decl, NULL);
2317
b5d36404 2318 /* Create the new version's call-graph node
2319 and update the edges of the new node. */
2320 new_version_node =
2321 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2322 redirect_callers, bbs_to_copy);
b5d36404 2323
2324 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2325 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2326 bbs_to_copy, new_entry_block);
b5d36404 2327
a0c938f0 2328 /* Update the new version's properties.
e03a95e7 2329 Make the new version visible only within this translation unit, and make
2330 sure it is not weak.
a0c938f0 2331 ??? We cannot use COMDAT linkage because there is no
b5d36404 2332 ABI support for this. */
6137cc9f 2333 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2334 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2335 new_version_node->local.externally_visible = 0;
2336 new_version_node->local.local = 1;
2337 new_version_node->lowered = true;
f014e39d 2338
e03a95e7 2339 /* Update the call_expr on the edges to call the new version node. */
2340 update_call_expr (new_version_node);
48e1416a 2341
50828ed8 2342 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2343 return new_version_node;
2344}
469679ab 2345
ccf4ab6b 2346/* Given a virtual clone, turn it into an actual clone. */
2347static void
2348cgraph_materialize_clone (struct cgraph_node *node)
2349{
2350 bitmap_obstack_initialize (NULL);
e748b31d 2351 node->former_clone_of = node->clone_of->decl;
2352 if (node->clone_of->former_clone_of)
2353 node->former_clone_of = node->clone_of->former_clone_of;
ccf4ab6b 2354 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2355 tree_function_versioning (node->clone_of->decl, node->decl,
2356 node->clone.tree_map, true,
b06ab5fa 2357 node->clone.args_to_skip, NULL, NULL);
e20422ea 2358 if (cgraph_dump_file)
2359 {
2360 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2361 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2362 }
ccf4ab6b 2363
2364 /* The function is no longer a clone. */
2365 if (node->next_sibling_clone)
2366 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2367 if (node->prev_sibling_clone)
2368 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2369 else
2370 node->clone_of->clones = node->next_sibling_clone;
2371 node->next_sibling_clone = NULL;
2372 node->prev_sibling_clone = NULL;
6d1cc52c 2373 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2374 {
2375 cgraph_release_function_body (node->clone_of);
2376 cgraph_node_remove_callees (node->clone_of);
2377 ipa_remove_all_references (&node->clone_of->ref_list);
2378 }
ccf4ab6b 2379 node->clone_of = NULL;
2380 bitmap_obstack_release (NULL);
2381}
2382
c596d830 2383/* If necessary, change the function declaration in the call statement
2384 associated with E so that it corresponds to the edge callee. */
2385
2386gimple
2387cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2388{
2389 tree decl = gimple_call_fndecl (e->call_stmt);
2390 gimple new_stmt;
3fd0ca33 2391 gimple_stmt_iterator gsi;
1f449108 2392#ifdef ENABLE_CHECKING
2393 struct cgraph_node *node;
2394#endif
c596d830 2395
1caef38b 2396 if (e->indirect_unknown_callee
0a31490e 2397 || decl == e->callee->decl)
c596d830 2398 return e->call_stmt;
2399
1f449108 2400#ifdef ENABLE_CHECKING
1caef38b 2401 if (decl)
2402 {
2403 node = cgraph_get_node (decl);
2404 gcc_assert (!node || !node->clone.combined_args_to_skip);
2405 }
1f449108 2406#endif
e748b31d 2407
c596d830 2408 if (cgraph_dump_file)
2409 {
2410 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2411 cgraph_node_name (e->caller), e->caller->uid,
2412 cgraph_node_name (e->callee), e->callee->uid);
2413 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2414 if (e->callee->clone.combined_args_to_skip)
91aba934 2415 {
2416 fprintf (cgraph_dump_file, " combined args to skip: ");
2417 dump_bitmap (cgraph_dump_file,
2418 e->callee->clone.combined_args_to_skip);
e748b31d 2419 }
c596d830 2420 }
2421
2422 if (e->callee->clone.combined_args_to_skip)
91aba934 2423 {
092cd838 2424 int lp_nr;
91aba934 2425
2426 new_stmt
2427 = gimple_call_copy_skip_args (e->call_stmt,
2428 e->callee->clone.combined_args_to_skip);
75c7f5a5 2429 gimple_call_set_fndecl (new_stmt, e->callee->decl);
91aba934 2430
2431 if (gimple_vdef (new_stmt)
2432 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2433 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2434
d4e80e2b 2435 gsi = gsi_for_stmt (e->call_stmt);
9126b675 2436 gsi_replace (&gsi, new_stmt, false);
092cd838 2437 /* We need to defer cleaning EH info on the new statement to
2438 fixup-cfg. We may not have dominator information at this point
2439 and thus would end up with unreachable blocks and have no way
2440 to communicate that we need to run CFG cleanup then. */
2441 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2442 if (lp_nr != 0)
2443 {
2444 remove_stmt_from_eh_lp (e->call_stmt);
2445 add_stmt_to_eh_lp (new_stmt, lp_nr);
2446 }
91aba934 2447 }
c596d830 2448 else
75c7f5a5 2449 {
2450 new_stmt = e->call_stmt;
2451 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2452 update_stmt (new_stmt);
2453 }
c596d830 2454
c596d830 2455 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2456
2457 if (cgraph_dump_file)
2458 {
2459 fprintf (cgraph_dump_file, " updated to:");
2460 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2461 }
2462 return new_stmt;
2463}
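
/* Editorial illustration, not part of cgraphunit.c: the call rewrite done
   above when the callee clone was created with some arguments to skip.  The
   clone dropped its unused second parameter, so a redirected call statement
   must drop the matching argument.  Names are hypothetical.  */
#include <stdio.h>

static int
original (int x, int unused_flag)
{
  (void) unused_flag;
  return x + 1;
}

static int
clone_skip_arg1 (int x)             /* argument 1 was skipped in the clone */
{
  return x + 1;
}

int
main (void)
{
  int before = original (41, 0);        /* the call before redirection */
  int after = clone_skip_arg1 (41);     /* the same call after the rewrite:
                                           new fndecl, argument dropped */
  printf ("%d %d\n", before, after);    /* prints 42 42 */
  return 0;
}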
2464
ccf4ab6b 2465/* Once all functions from the compilation unit are in memory, produce all clones
c596d830 2466 and update all calls. We might also do this on demand if we don't want to
2467 bring all functions to memory prior to compilation, but the current WHOPR
2468 implementation does that and it is a bit easier to keep everything right in
2469 this order. */
ccf4ab6b 2470void
2471cgraph_materialize_all_clones (void)
2472{
2473 struct cgraph_node *node;
2474 bool stabilized = false;
2475
2476 if (cgraph_dump_file)
2477 fprintf (cgraph_dump_file, "Materializing clones\n");
2478#ifdef ENABLE_CHECKING
2479 verify_cgraph ();
2480#endif
2481
2482 /* We could also use a topological order, but the number of iterations should
2483 be bounded by the number of IPA passes, since a single IPA pass is unlikely
2484 to create clones of clones it created itself. */
2485 while (!stabilized)
2486 {
2487 stabilized = true;
2488 for (node = cgraph_nodes; node; node = node->next)
2489 {
2490 if (node->clone_of && node->decl != node->clone_of->decl
2491 && !gimple_has_body_p (node->decl))
2492 {
2493 if (gimple_has_body_p (node->clone_of->decl))
2494 {
2495 if (cgraph_dump_file)
e20422ea 2496 {
0a10fd82 2497 fprintf (cgraph_dump_file, "cloning %s to %s\n",
e20422ea 2498 cgraph_node_name (node->clone_of),
2499 cgraph_node_name (node));
2500 if (node->clone.tree_map)
2501 {
2502 unsigned int i;
2503 fprintf (cgraph_dump_file, " replace map: ");
2504 for (i = 0; i < VEC_length (ipa_replace_map_p,
2505 node->clone.tree_map);
2506 i++)
2507 {
2508 struct ipa_replace_map *replace_info;
2509 replace_info = VEC_index (ipa_replace_map_p,
2510 node->clone.tree_map,
2511 i);
2512 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2513 fprintf (cgraph_dump_file, " -> ");
2514 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2515 fprintf (cgraph_dump_file, "%s%s;",
2516 replace_info->replace_p ? "(replace)":"",
2517 replace_info->ref_p ? "(ref)":"");
2518 }
2519 fprintf (cgraph_dump_file, "\n");
2520 }
2521 if (node->clone.args_to_skip)
2522 {
2523 fprintf (cgraph_dump_file, " args_to_skip: ");
2524 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2525 }
2526 if (node->clone.args_to_skip)
2527 {
2528 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2529 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2530 }
2531 }
ccf4ab6b 2532 cgraph_materialize_clone (node);
a510bd8d 2533 stabilized = false;
ccf4ab6b 2534 }
ccf4ab6b 2535 }
2536 }
2537 }
ee3f5fc0 2538 for (node = cgraph_nodes; node; node = node->next)
2539 if (!node->analyzed && node->callees)
2540 cgraph_node_remove_callees (node);
c596d830 2541 if (cgraph_dump_file)
2542 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2543#ifdef ENABLE_CHECKING
2544 verify_cgraph ();
2545#endif
ccf4ab6b 2546 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2547}
2548
a861fe52 2549#include "gt-cgraphunit.h"