gcc/cgraphunit.c
cd6bca02 1/* Callgraph based interprocedural optimizations.
aed6e608 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
5c8ad8f5 3 2011, 2012 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well as
cd6bca02 23 a few basic interprocedural optimizers.
b0cdf642 24
 25 The main scope of this file is to act as an interface between the
 26 tree-based front ends and the back end (and middle end).
 27
 28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front end has parsed the whole body of the
 33 function and it is certain that neither the body nor the declaration will change.
34
b326746d 35 (There is one exception, needed for implementing GCC extern inline
 36 functions.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is
 46 finalized and it will no longer change.
b0cdf642 47
851d9296 48 The call-graph construction and local function analysis take
 49 place here. Bodies of unreachable functions are released to
 50 conserve memory usage.
b0cdf642 51
b326746d 52 The function can be called multiple times when multiple source level
 53 compilation units are combined (such as in the C front end).
b0cdf642 54
55 - cgraph_optimize
56
 57 In this unit-at-a-time compilation the intraprocedural analysis takes
 58 place here. In particular the static functions whose address is never
 59 taken are marked as local. The back end can then use this information to
 60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
 66 data structure must be updated accordingly by this function.
 67 There should be little need to call this function and all the references
 68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ front end to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
 73 This function is responsible for lowering tree nodes not understood by
 74 generic code into understandable ones or alternatively marking
 75 callgraph and varpool nodes referenced by the expression as needed.
76
 77 ??? On tree-ssa, genericizing should take place here and we will avoid
 78 the need for these hooks (replacing them by a genericizing hook).
 79
6329636b 80 Analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
 83 In cgraph_finalize_compilation_unit the reachable functions are
 84 analyzed. During analysis the call-graph edges from reachable
 85 functions are constructed and their destinations are marked as
 86 reachable. References to functions and variables are discovered too
 87 and variables found to be needed are output to the assembly file. Via
 88 the mark_referenced call in assemble_variable, functions referenced by
 89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence is
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
 93 to change a function from !reachable to reachable and thus
 94 assemble_variable no longer calls mark_referenced.
95
 96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined or are not external are output.
98
 99 ??? It is possible that a reference to a function or variable is optimized
 100 out. We cannot deal with this nicely because the topological order is not
 101 suitable for it. For tree-ssa we may consider another pass doing
 102 optimization and re-discovering reachable functions.
103
 104 ??? Reorganize the code so variables are output very last and only if they
 105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
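
/* As a purely illustrative sketch (not part of this file), a front end might
   drive the interface documented above roughly as follows. The frontend_*
   wrapper names are invented for the example; only the cgraph entry points
   are real. */

#if 0
static void
frontend_finish_function (tree fndecl)
{
  /* The body and the declaration of FNDECL will no longer change. */
  cgraph_finalize_function (fndecl, /*nested=*/false);
}

static void
frontend_finish_translation_unit (void)
{
  /* The whole (source level) compilation unit has been parsed;
     analysis and expansion are driven from here. */
  cgraph_finalize_compilation_unit ();
}
#endif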
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
a41f2a28 141#include "ipa-inline.h"
7771d558 142#include "ipa-utils.h"
a0605d65 143#include "lto-streamer.h"
d7c6d889 144
a6868229 145static void cgraph_expand_all_functions (void);
d9d9733a 146static void cgraph_mark_functions_to_output (void);
147static void cgraph_expand_function (struct cgraph_node *);
f788fff2 148static void cgraph_output_pending_asms (void);
25bb88de 149
ecb08119 150FILE *cgraph_dump_file;
121f3051 151
28454517 152/* Used for vtable lookup in thunk adjusting. */
153static GTY (()) tree vtable_entry_type;
154
2c0b522d 155/* Determine if function DECL is needed. That is, visible to something
 156 either outside this translation unit or to something magic in the system
6329636b 157 configury. */
2c0b522d 158
7bfefa9d 159bool
160cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 161{
3f82b628 162 /* If the user told us it is used, then it must be so. */
05806473 163 if (node->local.externally_visible)
164 return true;
165
3f82b628 166 /* ??? If the assembler name is set by hand, it is possible to assemble
167 the name later after finalizing the function and the fact is noticed
168 in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
c70f46b0 170 && (!node->thunk.thunk_p && !node->same_body_alias)
3f82b628 171 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
172 return true;
173
55680bef 174 /* With -fkeep-inline-functions we are keeping all inline functions except
175 for extern inline ones. */
176 if (flag_keep_inline_functions
177 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 178 && !DECL_EXTERNAL (decl)
cbd7f5a0 179 && !DECL_DISREGARD_INLINE_LIMITS (decl))
55680bef 180 return true;
181
2c0b522d 182 /* If we decided it was needed before, but at the time we didn't have
183 the body of the function available, then it's still needed. We have
184 to go back and re-check its dependencies now. */
185 if (node->needed)
186 return true;
187
188 /* Externally visible functions must be output. The exception is
a0c938f0 189 COMDAT functions that must be output only when they are needed.
8baa9d15 190
191 When not optimizing, also output the static functions. (see
95da6220 192 PR24561), but don't do so for always_inline functions, functions
0f9238c0 193 declared inline and nested functions. These were optimized out
d3d410e1 194 in the original implementation and it is unclear whether we want
554f2707 195 to change the behavior here. */
bba7ddf8 196 if (((TREE_PUBLIC (decl)
0f9238c0 197 || (!optimize
07828d90 198 && !node->same_body_alias
cbd7f5a0 199 && !DECL_DISREGARD_INLINE_LIMITS (decl)
d3d410e1 200 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 201 && !(DECL_CONTEXT (decl)
202 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 203 && !flag_whole_program
cbcf2791 204 && !flag_lto)
62eec3b4 205 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 206 return true;
207
2c0b522d 208 return false;
209}
210
bdc40eb8 211/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
523c1122 212 functions to the callgraph so that they look like ordinary reachable
 213 functions inserted into the callgraph already at construction time. */
214
215bool
216cgraph_process_new_functions (void)
217{
218 bool output = false;
219 tree fndecl;
220 struct cgraph_node *node;
221
0cddb138 222 varpool_analyze_pending_decls ();
523c1122 223 /* Note that this queue may grow as it is being processed, as the new
 224 functions may generate new ones. */
225 while (cgraph_new_nodes)
226 {
227 node = cgraph_new_nodes;
228 fndecl = node->decl;
229 cgraph_new_nodes = cgraph_new_nodes->next_needed;
230 switch (cgraph_state)
231 {
232 case CGRAPH_STATE_CONSTRUCTION:
233 /* At construction time we just need to finalize function and move
234 it into reachable functions list. */
235
236 node->next_needed = NULL;
237 cgraph_finalize_function (fndecl, false);
238 cgraph_mark_reachable_node (node);
239 output = true;
4f7a1122 240 cgraph_call_function_insertion_hooks (node);
523c1122 241 break;
242
243 case CGRAPH_STATE_IPA:
f517b36e 244 case CGRAPH_STATE_IPA_SSA:
523c1122 245 /* When IPA optimization has already started, do all essential
 246 transformations that have already been performed on the whole
 247 cgraph but not on this function. */
248
75a70cf9 249 gimple_register_cfg_hooks ();
523c1122 250 if (!node->analyzed)
251 cgraph_analyze_function (node);
252 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
253 current_function_decl = fndecl;
f517b36e 254 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
255 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
256 /* When not optimizing, be sure we run early local passes anyway
257 to expand OMP. */
258 || !optimize)
20099e35 259 execute_pass_list (pass_early_local_passes.pass.sub);
649597af 260 else
a41f2a28 261 compute_inline_parameters (node, true);
523c1122 262 free_dominance_info (CDI_POST_DOMINATORS);
263 free_dominance_info (CDI_DOMINATORS);
264 pop_cfun ();
265 current_function_decl = NULL;
4f7a1122 266 cgraph_call_function_insertion_hooks (node);
523c1122 267 break;
268
269 case CGRAPH_STATE_EXPANSION:
270 /* Functions created during expansion shall be compiled
271 directly. */
09fc9532 272 node->process = 0;
4f7a1122 273 cgraph_call_function_insertion_hooks (node);
523c1122 274 cgraph_expand_function (node);
275 break;
276
277 default:
278 gcc_unreachable ();
279 break;
280 }
0cddb138 281 varpool_analyze_pending_decls ();
523c1122 282 }
283 return output;
284}
285
9b8fb23a 286/* As a GCC extension we allow redefinition of the function. The
 287 semantics when the two bodies differ is not well defined.
 288 We replace the old body with the new body so in unit-at-a-time mode
 289 we always use the new body, while in normal mode we may end up with
 290 the old body inlined into some functions and the new body expanded and
 291 inlined in others.
 292
 293 ??? It may make more sense to use one body for inlining and the other
 294 body for expanding the function but this is difficult to do. */
295
296static void
297cgraph_reset_node (struct cgraph_node *node)
298{
09fc9532 299 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 300 This is *not* testing for whether we've already emitted the function.
301 That case can be sort-of legitimately seen with real function redefinition
302 errors. I would argue that the front end should never present us with
303 such a case, but don't enforce that for now. */
09fc9532 304 gcc_assert (!node->process);
9b8fb23a 305
306 /* Reset our data structures so we can analyze the function again. */
307 memset (&node->local, 0, sizeof (node->local));
308 memset (&node->global, 0, sizeof (node->global));
309 memset (&node->rtl, 0, sizeof (node->rtl));
310 node->analyzed = false;
9b8fb23a 311 node->local.finalized = false;
312
9b8fb23a 313 cgraph_node_remove_callees (node);
9b8fb23a 314}
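
/* A minimal illustration (GNU C, gnu89 inline semantics; purely illustrative)
   of the redefinition extension described above; the second definition
   replaces the first and goes through cgraph_reset_node:

     extern inline int twice (int x) { return x + x; }
     int twice (int x) { return 2 * x; }  */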
c08871a9 315
28df663b 316/* DECL has been parsed. Take it, queue it, compile it at the whim of the
317 logic in effect. If NESTED is true, then our caller cannot stand to have
318 the garbage collector run at the moment. We would need to either create
319 a new GC context, or just not compile right now. */
ae01b312 320
321void
28df663b 322cgraph_finalize_function (tree decl, bool nested)
ae01b312 323{
5a90471f 324 struct cgraph_node *node = cgraph_get_create_node (decl);
ae01b312 325
c08871a9 326 if (node->local.finalized)
443089c1 327 {
328 cgraph_reset_node (node);
329 node->local.redefined_extern_inline = true;
330 }
28df663b 331
c08871a9 332 notice_global_symbol (decl);
79bb87b4 333 node->local.finalized = true;
e27482aa 334 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
ae01b312 335
7bfefa9d 336 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 337 cgraph_mark_needed_node (node);
338
ecda6e51 339 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 340 level unit, we need to be conservative about possible entry points
341 there. */
1e3aebec 342 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
343 || DECL_STATIC_CONSTRUCTOR (decl)
d050bafd 344 || DECL_STATIC_DESTRUCTOR (decl)
345 /* COMDAT virtual functions may be referenced by vtable from
0a10fd82 346 other compilation unit. Still we want to devirtualize calls
d050bafd 347 to those so we need to analyze them.
348 FIXME: We should introduce may edges for this purpose and update
349 their handling in unreachable function removal and inliner too. */
91bf9d9a 350 || (DECL_VIRTUAL_P (decl)
351 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
3f82b628 352 cgraph_mark_reachable_node (node);
353
2c0b522d 354 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 355 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 356 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 357
b69eb0ff 358 /* Possibly warn about unused parameters. */
359 if (warn_unused_parameter)
360 do_warn_unused_parameter (decl);
6329636b 361
362 if (!nested)
363 ggc_collect ();
ae01b312 364}
365
0da03d11 366/* C99 extern inline keywords allow changing of the declaration after the
 367 function has been finalized. We need to re-decide if we want to mark the
 368 function as needed then. */
369
370void
371cgraph_mark_if_needed (tree decl)
372{
fd6a3c41 373 struct cgraph_node *node = cgraph_get_node (decl);
7bfefa9d 374 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 375 cgraph_mark_needed_node (node);
376}
377
ccf4ab6b 378/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
379static bool
380clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
381{
c70f46b0 382 node = cgraph_function_or_thunk_node (node, NULL);
383 node2 = cgraph_function_or_thunk_node (node2, NULL);
ccf4ab6b 384 while (node != node2 && node2)
385 node2 = node2->clone_of;
386 return node2 != NULL;
387}
388
1a036a3b 389/* Verify edge E count and frequency. */
390
391static bool
392verify_edge_count_and_frequency (struct cgraph_edge *e)
393{
394 bool error_found = false;
395 if (e->count < 0)
396 {
397 error ("caller edge count is negative");
398 error_found = true;
399 }
400 if (e->frequency < 0)
401 {
402 error ("caller edge frequency is negative");
403 error_found = true;
404 }
405 if (e->frequency > CGRAPH_FREQ_MAX)
406 {
407 error ("caller edge frequency is too large");
408 error_found = true;
409 }
410 if (gimple_has_body_p (e->caller->decl)
411 && !e->caller->global.inlined_to
8bae3ea4 412 /* FIXME: Inline-analysis sets frequency to 0 when an edge is optimized out.
 413 Remove this once edges are actually removed from the function at that time. */
414 && (e->frequency
415 || (inline_edge_summary_vec
9ed50dd9 416 && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
417 <= (unsigned) e->uid)
418 || !inline_edge_summary (e)->predicate)))
1a036a3b 419 && (e->frequency
420 != compute_call_stmt_bb_frequency (e->caller->decl,
421 gimple_bb (e->call_stmt))))
422 {
0a10fd82 423 error ("caller edge frequency %i does not match BB frequency %i",
1a036a3b 424 e->frequency,
425 compute_call_stmt_bb_frequency (e->caller->decl,
426 gimple_bb (e->call_stmt)));
427 error_found = true;
428 }
429 return error_found;
430}
431
7b29dd2f 432/* Switch to THIS_CFUN if needed and print STMT to stderr. */
433static void
434cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
435{
436 /* debug_gimple_stmt needs correct cfun */
437 if (cfun != this_cfun)
438 set_cfun (this_cfun);
439 debug_gimple_stmt (stmt);
440}
441
2f9d66d3 442/* Verify that call graph edge E corresponds to DECL from the associated
443 statement. Return true if the verification should fail. */
444
445static bool
446verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
447{
54e8af13 448 struct cgraph_node *node;
449
450 if (!decl || e->callee->global.inlined_to)
451 return false;
452 node = cgraph_get_node (decl);
453
454 /* We do not know if a node from a different partition is an alias or what it
455 aliases and therefore cannot do the former_clone_of check reliably. */
456 if (!node || node->in_other_partition)
457 return false;
458 node = cgraph_function_or_thunk_node (node, NULL);
459
cdf67cee 460 if ((e->callee->former_clone_of != node->decl
461 && (!node->same_body_alias
462 || e->callee->former_clone_of != node->thunk.alias))
2f9d66d3 463 /* IPA-CP sometimes redirects an edge to a clone and then back to the former
cdf67cee 464 function. This ping-pong has to go, eventually. */
54e8af13 465 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
cdf67cee 466 && !clone_of_p (node, e->callee)
467 /* If decl is a same body alias of some other decl, allow e->callee to be
468 a clone of a clone of that other decl too. */
469 && (!node->same_body_alias
470 || !clone_of_p (cgraph_get_node (node->thunk.alias), e->callee)))
2f9d66d3 471 return true;
472 else
473 return false;
474}
475
b0cdf642 476/* Verify the internal consistency of the given cgraph node. */
4b987fac 477DEBUG_FUNCTION void
b0cdf642 478verify_cgraph_node (struct cgraph_node *node)
479{
480 struct cgraph_edge *e;
e27482aa 481 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
482 basic_block this_block;
75a70cf9 483 gimple_stmt_iterator gsi;
9bfec7c2 484 bool error_found = false;
b0cdf642 485
852f689e 486 if (seen_error ())
bd09cd3e 487 return;
488
b0cdf642 489 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 490 for (e = node->callees; e; e = e->next_callee)
491 if (e->aux)
492 {
0a81f5a0 493 error ("aux field set for edge %s->%s",
abd3e6b5 494 identifier_to_locale (cgraph_node_name (e->caller)),
495 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 496 error_found = true;
497 }
a2cb9b3b 498 if (node->count < 0)
499 {
bf776685 500 error ("execution count is negative");
a2cb9b3b 501 error_found = true;
502 }
59dd4830 503 if (node->global.inlined_to && node->local.externally_visible)
504 {
bf776685 505 error ("externally visible inline clone");
59dd4830 506 error_found = true;
507 }
508 if (node->global.inlined_to && node->address_taken)
509 {
bf776685 510 error ("inline clone with address taken");
59dd4830 511 error_found = true;
512 }
513 if (node->global.inlined_to && node->needed)
514 {
bf776685 515 error ("inline clone is needed");
59dd4830 516 error_found = true;
517 }
799c8711 518 for (e = node->indirect_calls; e; e = e->next_callee)
519 {
520 if (e->aux)
521 {
522 error ("aux field set for indirect edge from %s",
523 identifier_to_locale (cgraph_node_name (e->caller)));
524 error_found = true;
525 }
526 if (!e->indirect_unknown_callee
527 || !e->indirect_info)
528 {
529 error ("An indirect edge from %s is not marked as indirect or has "
530 "associated indirect_info, the corresponding statement is: ",
531 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 532 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 533 error_found = true;
534 }
535 }
b0cdf642 536 for (e = node->callers; e; e = e->next_caller)
537 {
1a036a3b 538 if (verify_edge_count_and_frequency (e))
539 error_found = true;
b0cdf642 540 if (!e->inline_failed)
541 {
542 if (node->global.inlined_to
543 != (e->caller->global.inlined_to
544 ? e->caller->global.inlined_to : e->caller))
545 {
0a81f5a0 546 error ("inlined_to pointer is wrong");
b0cdf642 547 error_found = true;
548 }
549 if (node->callers->next_caller)
550 {
0a81f5a0 551 error ("multiple inline callers");
b0cdf642 552 error_found = true;
553 }
554 }
555 else
556 if (node->global.inlined_to)
557 {
0a81f5a0 558 error ("inlined_to pointer set for noninline callers");
b0cdf642 559 error_found = true;
560 }
561 }
1a036a3b 562 for (e = node->indirect_calls; e; e = e->next_callee)
563 if (verify_edge_count_and_frequency (e))
564 error_found = true;
b0cdf642 565 if (!node->callers && node->global.inlined_to)
566 {
5cd75817 567 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 568 error_found = true;
569 }
570 if (node->global.inlined_to == node)
571 {
0a81f5a0 572 error ("inlined_to pointer refers to itself");
b0cdf642 573 error_found = true;
574 }
575
7019fd3f 576 if (!cgraph_get_node (node->decl))
b0cdf642 577 {
0f6439b9 578 error ("node not found in cgraph_hash");
b0cdf642 579 error_found = true;
580 }
a0c938f0 581
ccf4ab6b 582 if (node->clone_of)
583 {
584 struct cgraph_node *n;
585 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
586 if (n == node)
587 break;
588 if (!n)
589 {
590 error ("node has wrong clone_of");
591 error_found = true;
592 }
593 }
594 if (node->clones)
595 {
596 struct cgraph_node *n;
597 for (n = node->clones; n; n = n->next_sibling_clone)
598 if (n->clone_of != node)
599 break;
600 if (n)
601 {
602 error ("node has wrong clone list");
603 error_found = true;
604 }
605 }
606 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
607 {
608 error ("node is in clone list but it is not clone");
609 error_found = true;
610 }
611 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
612 {
613 error ("node has wrong prev_clone pointer");
614 error_found = true;
615 }
616 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
617 {
618 error ("double linked list of clones corrupted");
619 error_found = true;
620 }
c524ac5d 621 if (node->same_comdat_group)
622 {
623 struct cgraph_node *n = node->same_comdat_group;
624
625 if (!DECL_ONE_ONLY (node->decl))
626 {
627 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
628 error_found = true;
629 }
630 if (n == node)
631 {
632 error ("node is alone in a comdat group");
633 error_found = true;
634 }
635 do
636 {
637 if (!n->same_comdat_group)
638 {
639 error ("same_comdat_group is not a circular list");
640 error_found = true;
641 break;
642 }
643 n = n->same_comdat_group;
644 }
645 while (n != node);
646 }
ccf4ab6b 647
c70f46b0 648 if (node->analyzed && node->alias)
649 {
650 bool ref_found = false;
651 int i;
652 struct ipa_ref *ref;
653
654 if (node->callees)
655 {
656 error ("Alias has call edges");
657 error_found = true;
658 }
659 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
660 if (ref->use != IPA_REF_ALIAS)
661 {
70f89d12 662 error ("Alias has non-alias reference");
c70f46b0 663 error_found = true;
664 }
665 else if (ref_found)
666 {
667 error ("Alias has more than one alias reference");
668 error_found = true;
669 }
670 else
671 ref_found = true;
672 if (!ref_found)
673 {
674 error ("Analyzed alias has no reference");
675 error_found = true;
676 }
677 }
91bf9d9a 678 if (node->analyzed && node->thunk.thunk_p)
679 {
680 if (!node->callees)
681 {
682 error ("No edge out of thunk node");
683 error_found = true;
684 }
685 else if (node->callees->next_callee)
686 {
687 error ("More than one edge out of thunk node");
688 error_found = true;
689 }
690 if (gimple_has_body_p (node->decl))
691 {
692 error ("Thunk is not supposed to have body");
693 error_found = true;
694 }
695 }
696 else if (node->analyzed && gimple_has_body_p (node->decl)
697 && !TREE_ASM_WRITTEN (node->decl)
698 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
699 && !flag_wpa)
b0cdf642 700 {
e27482aa 701 if (this_cfun->cfg)
702 {
703 /* The nodes we're interested in are never shared, so walk
704 the tree ignoring duplicates. */
e7c352d1 705 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 706 /* Reach the trees by walking over the CFG, and note the
707 enclosing basic-blocks in the call edges. */
708 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 709 for (gsi = gsi_start_bb (this_block);
710 !gsi_end_p (gsi);
711 gsi_next (&gsi))
9bfec7c2 712 {
75a70cf9 713 gimple stmt = gsi_stmt (gsi);
799c8711 714 if (is_gimple_call (stmt))
9bfec7c2 715 {
716 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 717 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 718 if (e)
719 {
720 if (e->aux)
721 {
0a81f5a0 722 error ("shared call_stmt:");
7b29dd2f 723 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 724 error_found = true;
725 }
799c8711 726 if (!e->indirect_unknown_callee)
28454517 727 {
2f9d66d3 728 if (verify_edge_corresponds_to_fndecl (e, decl))
799c8711 729 {
730 error ("edge points to wrong declaration:");
731 debug_tree (e->callee->decl);
732 fprintf (stderr," Instead of:");
733 debug_tree (decl);
734 error_found = true;
735 }
28454517 736 }
799c8711 737 else if (decl)
9bfec7c2 738 {
799c8711 739 error ("an indirect edge with unknown callee "
740 "corresponding to a call_stmt with "
741 "a known declaration:");
ee3f5fc0 742 error_found = true;
7b29dd2f 743 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
9bfec7c2 744 }
745 e->aux = (void *)1;
746 }
799c8711 747 else if (decl)
9bfec7c2 748 {
0a81f5a0 749 error ("missing callgraph edge for call stmt:");
7b29dd2f 750 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 751 error_found = true;
752 }
753 }
754 }
e27482aa 755 pointer_set_destroy (visited_nodes);
e27482aa 756 }
757 else
758 /* No CFG available?! */
759 gcc_unreachable ();
760
b0cdf642 761 for (e = node->callees; e; e = e->next_callee)
762 {
799c8711 763 if (!e->aux)
b0cdf642 764 {
0a81f5a0 765 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 766 identifier_to_locale (cgraph_node_name (e->caller)),
767 identifier_to_locale (cgraph_node_name (e->callee)));
7b29dd2f 768 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
b0cdf642 769 error_found = true;
770 }
771 e->aux = 0;
772 }
799c8711 773 for (e = node->indirect_calls; e; e = e->next_callee)
774 {
775 if (!e->aux)
776 {
777 error ("an indirect edge from %s has no corresponding call_stmt",
778 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 779 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 780 error_found = true;
781 }
782 e->aux = 0;
783 }
b0cdf642 784 }
785 if (error_found)
786 {
787 dump_cgraph_node (stderr, node);
0a81f5a0 788 internal_error ("verify_cgraph_node failed");
b0cdf642 789 }
790 timevar_pop (TV_CGRAPH_VERIFY);
791}
792
793/* Verify whole cgraph structure. */
4b987fac 794DEBUG_FUNCTION void
b0cdf642 795verify_cgraph (void)
796{
797 struct cgraph_node *node;
798
852f689e 799 if (seen_error ())
8ec2a798 800 return;
801
b0cdf642 802 for (node = cgraph_nodes; node; node = node->next)
803 verify_cgraph_node (node);
804}
805
56af936e 806/* Output all asm statements we have stored up to be output. */
807
808static void
809cgraph_output_pending_asms (void)
810{
811 struct cgraph_asm_node *can;
812
852f689e 813 if (seen_error ())
56af936e 814 return;
815
816 for (can = cgraph_asm_nodes; can; can = can->next)
817 assemble_asm (can->asm_str);
818 cgraph_asm_nodes = NULL;
819}
820
0785e435 821/* Analyze the function scheduled to be output. */
222bc9b9 822void
0785e435 823cgraph_analyze_function (struct cgraph_node *node)
824{
bfec3452 825 tree save = current_function_decl;
0785e435 826 tree decl = node->decl;
827
c70f46b0 828 if (node->alias && node->thunk.alias)
829 {
830 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
b0898cb7 831 struct cgraph_node *n;
832
833 for (n = tgt; n && n->alias;
834 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
835 if (n == node)
836 {
837 error ("function %q+D part of alias cycle", node->decl);
838 node->alias = false;
839 return;
840 }
c70f46b0 841 if (!VEC_length (ipa_ref_t, node->ref_list.references))
842 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
843 if (node->same_body_alias)
844 {
845 DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
846 DECL_DECLARED_INLINE_P (node->decl)
847 = DECL_DECLARED_INLINE_P (node->thunk.alias);
848 DECL_DISREGARD_INLINE_LIMITS (node->decl)
849 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
850 }
851
 852 /* Fix up the visibility nonsense the C++ front end produces on same body aliases. */
853 if (TREE_PUBLIC (node->decl) && node->same_body_alias)
854 {
855 DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
89bf5ca9 856 if (DECL_ONE_ONLY (node->thunk.alias))
c70f46b0 857 {
89bf5ca9 858 DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
c70f46b0 859 DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
860 if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
861 {
862 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
863 node->same_comdat_group = tgt;
864 if (!tgt->same_comdat_group)
865 tgt->same_comdat_group = node;
866 else
867 {
868 struct cgraph_node *n;
869 for (n = tgt->same_comdat_group;
870 n->same_comdat_group != tgt;
871 n = n->same_comdat_group)
872 ;
873 n->same_comdat_group = node;
874 }
875 }
876 }
877 }
878 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
879 if (node->address_taken)
880 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
881 if (cgraph_decide_is_function_needed (node, node->decl))
882 cgraph_mark_needed_node (node);
883 }
884 else if (node->thunk.thunk_p)
91bf9d9a 885 {
886 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
887 NULL, 0, CGRAPH_FREQ_BASE);
888 }
889 else
890 {
891 current_function_decl = decl;
892 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 893
91bf9d9a 894 assign_assembler_name_if_neeeded (node->decl);
6816d0c4 895
91bf9d9a 896 /* Make sure to gimplify bodies only once. During analyzing a
897 function we lower it, which will require gimplified nested
898 functions, so we can end up here with an already gimplified
899 body. */
900 if (!gimple_body (decl))
901 gimplify_function_tree (decl);
902 dump_function (TDI_generic, decl);
bfec3452 903
47199071 904 /* Lower the function. */
905 if (!node->lowered)
906 {
907 if (node->nested)
908 lower_nested_functions (node->decl);
909 gcc_assert (!node->nested);
910
911 gimple_register_cfg_hooks ();
912 bitmap_obstack_initialize (NULL);
913 execute_pass_list (all_lowering_passes);
914 free_dominance_info (CDI_POST_DOMINATORS);
915 free_dominance_info (CDI_DOMINATORS);
916 compact_blocks ();
917 bitmap_obstack_release (NULL);
918 node->lowered = true;
919 }
920
91bf9d9a 921 pop_cfun ();
922 }
6e8d6e86 923 node->analyzed = true;
0785e435 924
bfec3452 925 current_function_decl = save;
0785e435 926}
927
c70f46b0 928/* The C++ front end produces same body aliases all over the place, even before
 929 PCH gets streamed out. It relies on us linking the aliases with their function
 930 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
 931 first produce aliases without links, but once the C++ FE is sure it won't stream
 932 PCH we build the links via this function. */
933
934void
935cgraph_process_same_body_aliases (void)
936{
937 struct cgraph_node *node;
938 for (node = cgraph_nodes; node; node = node->next)
939 if (node->same_body_alias
940 && !VEC_length (ipa_ref_t, node->ref_list.references))
941 {
942 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
943 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
944 }
945 same_body_aliases_done = true;
946}
947
d05db70d 948/* Process attributes common for vars and functions. */
949
950static void
951process_common_attributes (tree decl)
952{
953 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
954
955 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
956 {
957 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
958 "%<weakref%> attribute should be accompanied with"
959 " an %<alias%> attribute");
960 DECL_WEAK (decl) = 0;
40b32d93 961 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
962 DECL_ATTRIBUTES (decl));
d05db70d 963 }
964}
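
/* For example (hypothetical user code), the first declaration below carries a
   weakref attribute without any alias target and is diagnosed and demoted by
   the code above, while the second form, which names its target, is left
   alone:

     static void f (void) __attribute__ ((weakref));
     static void g (void) __attribute__ ((weakref ("real_g")));  */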
965
05806473 966/* Look for externally_visible and used attributes and mark cgraph nodes
967 accordingly.
968
969 We cannot mark the nodes at the point the attributes are processed (in
970 handle_*_attribute) because the copy of the declarations available at that
971 point may not be canonical. For example, in:
972
973 void f();
974 void f() __attribute__((used));
975
976 the declaration we see in handle_used_attribute will be the second
977 declaration -- but the front end will subsequently merge that declaration
978 with the original declaration and discard the second declaration.
979
980 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
981
982 void f() {}
983 void f() __attribute__((externally_visible));
984
985 is valid.
986
987 So, we walk the nodes at the end of the translation unit, applying the
988 attributes at that point. */
989
990static void
991process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 992 struct varpool_node *first_var)
05806473 993{
994 struct cgraph_node *node;
1d416bd7 995 struct varpool_node *vnode;
05806473 996
997 for (node = cgraph_nodes; node != first; node = node->next)
998 {
999 tree decl = node->decl;
83a23b05 1000 if (DECL_PRESERVE_P (decl))
0b49f8f8 1001 cgraph_mark_needed_node (node);
62433d51 1002 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1003 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
1004 && TREE_PUBLIC (node->decl))
1005 {
1006 if (node->local.finalized)
1007 cgraph_mark_needed_node (node);
1008 }
1009 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 1010 {
ba12ea31 1011 if (! TREE_PUBLIC (node->decl))
712d2297 1012 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1013 "%<externally_visible%>"
1014 " attribute have effect only on public objects");
59dd4830 1015 else if (node->local.finalized)
1016 cgraph_mark_needed_node (node);
05806473 1017 }
40b32d93 1018 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
c70f46b0 1019 && (node->local.finalized && !node->alias))
40b32d93 1020 {
1021 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
1022 "%<weakref%> attribute ignored"
1023 " because function is defined");
1024 DECL_WEAK (decl) = 0;
1025 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1026 DECL_ATTRIBUTES (decl));
1027 }
a522e9eb 1028
1029 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
1030 && !DECL_DECLARED_INLINE_P (decl)
1031 /* redefining extern inline function makes it DECL_UNINLINABLE. */
1032 && !DECL_UNINLINABLE (decl))
1033 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1034 "always_inline function might not be inlinable");
1035
d05db70d 1036 process_common_attributes (decl);
05806473 1037 }
1d416bd7 1038 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 1039 {
1040 tree decl = vnode->decl;
83a23b05 1041 if (DECL_PRESERVE_P (decl))
05806473 1042 {
22671757 1043 vnode->force_output = true;
05806473 1044 if (vnode->finalized)
1d416bd7 1045 varpool_mark_needed_node (vnode);
05806473 1046 }
62433d51 1047 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
1048 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 1049 && TREE_PUBLIC (vnode->decl))
62433d51 1050 {
1051 if (vnode->finalized)
1052 varpool_mark_needed_node (vnode);
1053 }
1054 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 1055 {
ba12ea31 1056 if (! TREE_PUBLIC (vnode->decl))
712d2297 1057 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1058 "%<externally_visible%>"
1059 " attribute have effect only on public objects");
59dd4830 1060 else if (vnode->finalized)
1061 varpool_mark_needed_node (vnode);
05806473 1062 }
40b32d93 1063 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
1064 && vnode->finalized
1065 && DECL_INITIAL (decl))
1066 {
1067 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1068 "%<weakref%> attribute ignored"
1069 " because variable is initialized");
1070 DECL_WEAK (decl) = 0;
1071 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1072 DECL_ATTRIBUTES (decl));
1073 }
d05db70d 1074 process_common_attributes (decl);
05806473 1075 }
1076}
1077
aeeb194b 1078/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
 1079 each reachable function) and build the cgraph.
 1080 The function can be called multiple times after inserting new nodes into the
 1081 beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 1082
aeeb194b 1083static void
1084cgraph_analyze_functions (void)
ae01b312 1085{
c1dcd13c 1086 /* Keep track of already processed nodes when called multiple times for
06b27565 1087 intermodule optimization. */
c1dcd13c 1088 static struct cgraph_node *first_analyzed;
c17d0de1 1089 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1090 static struct varpool_node *first_analyzed_var;
aeeb194b 1091 struct cgraph_node *node, *next;
ae01b312 1092
f1c35659 1093 bitmap_obstack_initialize (NULL);
c17d0de1 1094 process_function_and_variable_attributes (first_processed,
1095 first_analyzed_var);
1096 first_processed = cgraph_nodes;
1d416bd7 1097 first_analyzed_var = varpool_nodes;
1098 varpool_analyze_pending_decls ();
f79b6507 1099 if (cgraph_dump_file)
ae01b312 1100 {
e4200070 1101 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1102 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1103 if (node->needed)
f79b6507 1104 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1105 fprintf (cgraph_dump_file, "\n");
ae01b312 1106 }
aeeb194b 1107 cgraph_process_new_functions ();
ae01b312 1108
e6d2b2d8 1109 /* Propagate the reachability flag and lower the representation of all reachable
1110 functions. In the future, lowering will introduce new functions and
1111 new entry points on the way (by template instantiation and virtual
1112 method table generation for instance). */
3d7bfc56 1113 while (cgraph_nodes_queue)
ae01b312 1114 {
0785e435 1115 struct cgraph_edge *edge;
3d7bfc56 1116 tree decl = cgraph_nodes_queue->decl;
1117
1118 node = cgraph_nodes_queue;
d87976fb 1119 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1120 node->next_needed = NULL;
ae01b312 1121
638531ad 1122 /* ??? It is possible to create an extern inline function and later use a
bbd5cba2 1123 weak alias attribute to kill its body. See
638531ad 1124 gcc.c-torture/compile/20011119-1.c */
91bf9d9a 1125 if (!DECL_STRUCT_FUNCTION (decl)
c70f46b0 1126 && (!node->alias || !node->thunk.alias)
91bf9d9a 1127 && !node->thunk.thunk_p)
9b8fb23a 1128 {
1129 cgraph_reset_node (node);
443089c1 1130 node->local.redefined_extern_inline = true;
9b8fb23a 1131 continue;
1132 }
638531ad 1133
7bfefa9d 1134 if (!node->analyzed)
1135 cgraph_analyze_function (node);
2c0b522d 1136
ae01b312 1137 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1138 if (!edge->callee->reachable)
2c0b522d 1139 cgraph_mark_reachable_node (edge->callee);
91bf9d9a 1140 for (edge = node->callers; edge; edge = edge->next_caller)
1141 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1142 cgraph_mark_reachable_node (edge->caller);
2c0b522d 1143
61c2c7b1 1144 if (node->same_comdat_group)
1145 {
1146 for (next = node->same_comdat_group;
1147 next != node;
1148 next = next->same_comdat_group)
1149 cgraph_mark_reachable_node (next);
1150 }
1151
d544ceff 1152 /* If decl is a clone of an abstract function, mark that abstract
1153 function so that we don't release its body. The DECL_INITIAL() of that
fd6a3c41 1154 abstract function declaration will be later needed to output debug
1155 info. */
d544ceff 1156 if (DECL_ABSTRACT_ORIGIN (decl))
1157 {
fd6a3c41 1158 struct cgraph_node *origin_node;
1159 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
d544ceff 1160 origin_node->abstract_and_needed = true;
1161 }
1162
c17d0de1 1163 /* We finalize local static variables during constructing callgraph
1164 edges. Process their attributes too. */
1165 process_function_and_variable_attributes (first_processed,
1166 first_analyzed_var);
1167 first_processed = cgraph_nodes;
1d416bd7 1168 first_analyzed_var = varpool_nodes;
1169 varpool_analyze_pending_decls ();
aeeb194b 1170 cgraph_process_new_functions ();
ae01b312 1171 }
2c0b522d 1172
aa5e06c7 1173 /* Collect entry points to the unit. */
f79b6507 1174 if (cgraph_dump_file)
3d7bfc56 1175 {
e4200070 1176 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1177 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1178 if (node->needed)
f79b6507 1179 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1180 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1181 dump_cgraph (cgraph_dump_file);
7410370b 1182 dump_varpool (cgraph_dump_file);
3d7bfc56 1183 }
e6d2b2d8 1184
f79b6507 1185 if (cgraph_dump_file)
1186 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1187
f4ec5ce1 1188 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1189 {
1190 tree decl = node->decl;
f4ec5ce1 1191 next = node->next;
ae01b312 1192
91bf9d9a 1193 if (node->local.finalized && !gimple_has_body_p (decl)
c70f46b0 1194 && (!node->alias || !node->thunk.alias)
91bf9d9a 1195 && !node->thunk.thunk_p)
a0c938f0 1196 cgraph_reset_node (node);
9b8fb23a 1197
91bf9d9a 1198 if (!node->reachable
c70f46b0 1199 && (gimple_has_body_p (decl) || node->thunk.thunk_p
1200 || (node->alias && node->thunk.alias)))
ae01b312 1201 {
f79b6507 1202 if (cgraph_dump_file)
1203 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1204 cgraph_remove_node (node);
9b8fb23a 1205 continue;
ae01b312 1206 }
bc5cab3b 1207 else
1208 node->next_needed = NULL;
91bf9d9a 1209 gcc_assert (!node->local.finalized || node->thunk.thunk_p
c70f46b0 1210 || node->alias
91bf9d9a 1211 || gimple_has_body_p (decl));
9b8fb23a 1212 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1213 }
f79b6507 1214 if (cgraph_dump_file)
e4200070 1215 {
1216 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1217 dump_cgraph (cgraph_dump_file);
7410370b 1218 dump_varpool (cgraph_dump_file);
e4200070 1219 }
f1c35659 1220 bitmap_obstack_release (NULL);
c1dcd13c 1221 first_analyzed = cgraph_nodes;
ae01b312 1222 ggc_collect ();
aeeb194b 1223}
1224
3a849bc1 1225/* Translate the ugly representation of aliases as alias pairs into a nice
 1226 representation in the callgraph. We don't handle all cases yet,
 1227 unfortunately. */
1228
1229static void
1230handle_alias_pairs (void)
1231{
1232 alias_pair *p;
1233 unsigned i;
1234 struct cgraph_node *target_node;
1235 struct cgraph_node *src_node;
e0eaac80 1236 struct varpool_node *target_vnode;
3a849bc1 1237
1238 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1239 {
1240 if (TREE_CODE (p->decl) == FUNCTION_DECL
3a849bc1 1241 && (target_node = cgraph_node_for_asm (p->target)) != NULL)
1242 {
1243 src_node = cgraph_get_node (p->decl);
1244 if (src_node && src_node->local.finalized)
1245 cgraph_reset_node (src_node);
1246 /* Normally EXTERNAL flag is used to mark external inlines,
1247 however for aliases it seems to be allowed to use it w/o
1248 any meaning. See gcc.dg/attr-alias-3.c
1249 However for weakref we insist on EXTERNAL flag being set.
1250 See gcc.dg/attr-alias-5.c */
1251 if (DECL_EXTERNAL (p->decl))
5e712541 1252 DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
1253 DECL_ATTRIBUTES (p->decl)) != NULL;
3a849bc1 1254 cgraph_create_function_alias (p->decl, target_node->decl);
1255 VEC_unordered_remove (alias_pair, alias_pairs, i);
1256 }
e0eaac80 1257 else if (TREE_CODE (p->decl) == VAR_DECL
e0eaac80 1258 && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
1259 {
1260 /* Normally EXTERNAL flag is used to mark external inlines,
1261 however for aliases it seems to be allowed to use it w/o
1262 any meaning. See gcc.dg/attr-alias-3.c
1263 However for weakref we insist on EXTERNAL flag being set.
1264 See gcc.dg/attr-alias-5.c */
1265 if (DECL_EXTERNAL (p->decl))
5e712541 1266 DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
1267 DECL_ATTRIBUTES (p->decl)) != NULL;
e0eaac80 1268 varpool_create_variable_alias (p->decl, target_vnode->decl);
1269 VEC_unordered_remove (alias_pair, alias_pairs, i);
1270 }
badeded8 1271 /* Weakrefs with a target not defined in the current unit are easy to handle;
 1272 they behave just like external variables except we need to note the alias
 1273 flag to later output the weakref pseudo op into the asm file. */
1274 else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
1275 && (TREE_CODE (p->decl) == FUNCTION_DECL
1276 ? (varpool_node_for_asm (p->target) == NULL)
1277 : (cgraph_node_for_asm (p->target) == NULL)))
1278 {
1279 if (TREE_CODE (p->decl) == FUNCTION_DECL)
1280 cgraph_get_create_node (p->decl)->alias = true;
1281 else
1282 varpool_get_node (p->decl)->alias = true;
1283 DECL_EXTERNAL (p->decl) = 1;
1284 VEC_unordered_remove (alias_pair, alias_pairs, i);
1285 }
3a849bc1 1286 else
1287 {
1288 if (dump_file)
1289 fprintf (dump_file, "Unhandled alias %s->%s\n",
1290 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
1291 IDENTIFIER_POINTER (p->target));
1292
1293 i++;
1294 }
1295 }
1296}
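
/* The alias pairs handled above typically come from user code such as the
   following (illustrative only):

     int real_fn (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("real_fn")));

     static int weak_fn (void) __attribute__ ((weakref ("undefined_target")));

   The first pair becomes a cgraph alias of real_fn; the weakref whose target
   is not defined in this unit is handled by the "easy" branch above and stays
   a mere external declaration with the alias flag set.  */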
1297
8f69fd82 1298
ae01b312 1299/* Figure out what functions we want to assemble. */
1300
1301static void
d9d9733a 1302cgraph_mark_functions_to_output (void)
ae01b312 1303{
1304 struct cgraph_node *node;
61c2c7b1 1305#ifdef ENABLE_CHECKING
1306 bool check_same_comdat_groups = false;
1307
1308 for (node = cgraph_nodes; node; node = node->next)
1309 gcc_assert (!node->process);
1310#endif
ae01b312 1311
ae01b312 1312 for (node = cgraph_nodes; node; node = node->next)
1313 {
1314 tree decl = node->decl;
d7c6d889 1315 struct cgraph_edge *e;
a0c938f0 1316
61c2c7b1 1317 gcc_assert (!node->process || node->same_comdat_group);
1318 if (node->process)
1319 continue;
d7c6d889 1320
1321 for (e = node->callers; e; e = e->next_caller)
611e5405 1322 if (e->inline_failed)
d7c6d889 1323 break;
ae01b312 1324
e6d2b2d8 1325 /* We need to output all local functions that are used and not
1326 always inlined, as well as those that are reachable from
1327 outside the current compilation unit. */
1a1a827a 1328 if (node->analyzed
91bf9d9a 1329 && !node->thunk.thunk_p
c70f46b0 1330 && !node->alias
b0cdf642 1331 && !node->global.inlined_to
1e3aebec 1332 && (!cgraph_only_called_directly_p (node)
c70f46b0 1333 || ((e || ipa_ref_has_aliases_p (&node->ref_list))
1334 && node->reachable))
4ee9c684 1335 && !TREE_ASM_WRITTEN (decl)
ae01b312 1336 && !DECL_EXTERNAL (decl))
61c2c7b1 1337 {
1338 node->process = 1;
1339 if (node->same_comdat_group)
1340 {
1341 struct cgraph_node *next;
1342 for (next = node->same_comdat_group;
1343 next != node;
1344 next = next->same_comdat_group)
c70f46b0 1345 if (!next->thunk.thunk_p && !next->alias)
91bf9d9a 1346 next->process = 1;
61c2c7b1 1347 }
1348 }
1349 else if (node->same_comdat_group)
1350 {
1351#ifdef ENABLE_CHECKING
1352 check_same_comdat_groups = true;
1353#endif
1354 }
cc636d56 1355 else
9cee7c3f 1356 {
1357 /* We should've reclaimed all functions that are not needed. */
1358#ifdef ENABLE_CHECKING
75a70cf9 1359 if (!node->global.inlined_to
1a1a827a 1360 && gimple_has_body_p (decl)
08843223 1361 /* FIXME: in an ltrans unit when the offline copy is outside a partition but
 1362 inline copies are inside a partition, we can end up not removing the body
 1363 since we no longer have an analyzed node pointing to it. */
1364 && !node->in_other_partition
c70f46b0 1365 && !node->alias
9cee7c3f 1366 && !DECL_EXTERNAL (decl))
1367 {
1368 dump_cgraph_node (stderr, node);
1369 internal_error ("failed to reclaim unneeded function");
1370 }
1371#endif
75a70cf9 1372 gcc_assert (node->global.inlined_to
1a1a827a 1373 || !gimple_has_body_p (decl)
08843223 1374 || node->in_other_partition
9cee7c3f 1375 || DECL_EXTERNAL (decl));
1376
1377 }
a0c938f0 1378
961e3b13 1379 }
61c2c7b1 1380#ifdef ENABLE_CHECKING
1381 if (check_same_comdat_groups)
1382 for (node = cgraph_nodes; node; node = node->next)
1383 if (node->same_comdat_group && !node->process)
1384 {
1385 tree decl = node->decl;
1386 if (!node->global.inlined_to
1387 && gimple_has_body_p (decl)
6d36105a 1388 /* FIXME: in an ltrans unit when the offline copy is outside a
1389 partition but inline copies are inside a partition, we can
1390 end up not removing the body since we no longer have an
1391 analyzed node pointing to it. */
08843223 1392 && !node->in_other_partition
61c2c7b1 1393 && !DECL_EXTERNAL (decl))
1394 {
1395 dump_cgraph_node (stderr, node);
6d36105a 1396 internal_error ("failed to reclaim unneeded function in same "
1397 "comdat group");
61c2c7b1 1398 }
1399 }
1400#endif
961e3b13 1401}
1402
28454517 1403/* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
 1404 in lowered gimple form.
 1405
 1406 Set current_function_decl and cfun to the newly constructed empty function body.
 1407 Return the basic block in the function body. */
1408
1409static basic_block
1410init_lowered_empty_function (tree decl)
1411{
1412 basic_block bb;
1413
1414 current_function_decl = decl;
1415 allocate_struct_function (decl, false);
1416 gimple_register_cfg_hooks ();
1417 init_empty_tree_cfg ();
1418 init_tree_ssa (cfun);
1419 init_ssa_operands ();
1420 cfun->gimple_df->in_ssa_p = true;
1421 DECL_INITIAL (decl) = make_node (BLOCK);
1422
1423 DECL_SAVED_TREE (decl) = error_mark_node;
1424 cfun->curr_properties |=
1425 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
657e3a56 1426 PROP_ssa | PROP_gimple_any);
28454517 1427
1428 /* Create BB for body of the function and connect it properly. */
1429 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
167ef6d9 1430 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1431 make_edge (bb, EXIT_BLOCK_PTR, 0);
28454517 1432
1433 return bb;
1434}
1435
1436/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1437 offset indicated by VIRTUAL_OFFSET, if that is
1438 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1439 zero for a result adjusting thunk. */
1440
1441static tree
1442thunk_adjust (gimple_stmt_iterator * bsi,
1443 tree ptr, bool this_adjusting,
1444 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1445{
1446 gimple stmt;
1447 tree ret;
1448
55d6cb23 1449 if (this_adjusting
1450 && fixed_offset != 0)
28454517 1451 {
2cc66f2a 1452 stmt = gimple_build_assign
1453 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1454 ptr,
1455 fixed_offset));
28454517 1456 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1457 }
1458
1459 /* If there's a virtual offset, look up that value in the vtable and
1460 adjust the pointer again. */
1461 if (virtual_offset)
1462 {
1463 tree vtabletmp;
1464 tree vtabletmp2;
1465 tree vtabletmp3;
28454517 1466
1467 if (!vtable_entry_type)
1468 {
1469 tree vfunc_type = make_node (FUNCTION_TYPE);
1470 TREE_TYPE (vfunc_type) = integer_type_node;
1471 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1472 layout_type (vfunc_type);
1473
1474 vtable_entry_type = build_pointer_type (vfunc_type);
1475 }
1476
1477 vtabletmp =
1478 create_tmp_var (build_pointer_type
1479 (build_pointer_type (vtable_entry_type)), "vptr");
1480
1481 /* The vptr is always at offset zero in the object. */
1482 stmt = gimple_build_assign (vtabletmp,
1483 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1484 ptr));
1485 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1486 mark_symbols_for_renaming (stmt);
1487 find_referenced_vars_in (stmt);
1488
1489 /* Form the vtable address. */
1490 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1491 "vtableaddr");
1492 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1493 build_simple_mem_ref (vtabletmp));
28454517 1494 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1495 mark_symbols_for_renaming (stmt);
1496 find_referenced_vars_in (stmt);
1497
1498 /* Find the entry with the vcall offset. */
1499 stmt = gimple_build_assign (vtabletmp2,
2cc66f2a 1500 fold_build_pointer_plus_loc (input_location,
1501 vtabletmp2,
1502 virtual_offset));
28454517 1503 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1504
1505 /* Get the offset itself. */
1506 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1507 "vcalloffset");
1508 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1509 build_simple_mem_ref (vtabletmp2));
28454517 1510 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1511 mark_symbols_for_renaming (stmt);
1512 find_referenced_vars_in (stmt);
1513
28454517 1514 /* Adjust the `this' pointer. */
a0553bff 1515 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1516 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1517 GSI_CONTINUE_LINKING);
28454517 1518 }
1519
55d6cb23 1520 if (!this_adjusting
1521 && fixed_offset != 0)
28454517 1522 /* Adjust the pointer by the constant. */
1523 {
1524 tree ptrtmp;
1525
1526 if (TREE_CODE (ptr) == VAR_DECL)
1527 ptrtmp = ptr;
1528 else
1529 {
1530 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1531 stmt = gimple_build_assign (ptrtmp, ptr);
1532 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1533 mark_symbols_for_renaming (stmt);
1534 find_referenced_vars_in (stmt);
1535 }
2cc66f2a 1536 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1537 ptrtmp, fixed_offset);
28454517 1538 }
1539
1540 /* Emit the statement and gimplify the adjustment expression. */
1541 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1542 stmt = gimple_build_assign (ret, ptr);
1543 mark_symbols_for_renaming (stmt);
1544 find_referenced_vars_in (stmt);
1545 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1546
1547 return ret;
1548}
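
/* In C-like pseudo code (illustrative only), a this-adjusting thunk built by
   the code above computes

     p = this + fixed_offset;
     if (virtual_offset)
       p = p + *(ptrdiff_t *) (*(char **) p + virtual_offset);

   while a result-adjusting thunk applies the vcall offset first and adds the
   constant FIXED_OFFSET last, as the ordering of the two blocks above shows. */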
1549
1550/* Produce assembler for thunk NODE. */
1551
1552static void
1553assemble_thunk (struct cgraph_node *node)
1554{
1555 bool this_adjusting = node->thunk.this_adjusting;
1556 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1557 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1558 tree virtual_offset = NULL;
1559 tree alias = node->thunk.alias;
1560 tree thunk_fndecl = node->decl;
1561 tree a = DECL_ARGUMENTS (thunk_fndecl);
1562
1563 current_function_decl = thunk_fndecl;
1564
aed6e608 1565 /* Ensure thunks are emitted in their correct sections. */
1566 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1567
28454517 1568 if (this_adjusting
1569 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1570 virtual_value, alias))
1571 {
1572 const char *fnname;
1573 tree fn_block;
28b2c6a7 1574 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
28454517 1575
1576 DECL_RESULT (thunk_fndecl)
1577 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
28b2c6a7 1578 RESULT_DECL, 0, restype);
22ea3b47 1579 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1580
1581 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1582 create one. */
1583 fn_block = make_node (BLOCK);
1584 BLOCK_VARS (fn_block) = a;
1585 DECL_INITIAL (thunk_fndecl) = fn_block;
1586 init_function_start (thunk_fndecl);
1587 cfun->is_thunk = 1;
1588 assemble_start_function (thunk_fndecl, fnname);
1589
1590 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1591 fixed_offset, virtual_value, alias);
1592
1593 assemble_end_function (thunk_fndecl, fnname);
1594 init_insn_lengths ();
1595 free_after_compilation (cfun);
1596 set_cfun (NULL);
1597 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
91bf9d9a 1598 node->thunk.thunk_p = false;
1599 node->analyzed = false;
28454517 1600 }
1601 else
1602 {
1603 tree restype;
1604 basic_block bb, then_bb, else_bb, return_bb;
1605 gimple_stmt_iterator bsi;
1606 int nargs = 0;
1607 tree arg;
1608 int i;
1609 tree resdecl;
1610 tree restmp = NULL;
1611 VEC(tree, heap) *vargs;
1612
1613 gimple call;
1614 gimple ret;
1615
1616 DECL_IGNORED_P (thunk_fndecl) = 1;
1617 bitmap_obstack_initialize (NULL);
1618
1619 if (node->thunk.virtual_offset_p)
1620 virtual_offset = size_int (virtual_value);
1621
1622 /* Build the return declaration for the function. */
1623 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1624 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1625 {
1626 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1627 DECL_ARTIFICIAL (resdecl) = 1;
1628 DECL_IGNORED_P (resdecl) = 1;
1629 DECL_RESULT (thunk_fndecl) = resdecl;
1630 }
1631 else
1632 resdecl = DECL_RESULT (thunk_fndecl);
1633
1634 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1635
1636 bsi = gsi_start_bb (bb);
1637
1638 /* Build call to the function being thunked. */
1639 if (!VOID_TYPE_P (restype))
1640 {
1641 if (!is_gimple_reg_type (restype))
1642 {
1643 restmp = resdecl;
2ab2ce89 1644 add_local_decl (cfun, restmp);
28454517 1645 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1646 }
1647 else
1648 restmp = create_tmp_var_raw (restype, "retval");
1649 }
1650
1767a056 1651 for (arg = a; arg; arg = DECL_CHAIN (arg))
28454517 1652 nargs++;
1653 vargs = VEC_alloc (tree, heap, nargs);
1654 if (this_adjusting)
1655 VEC_quick_push (tree, vargs,
1656 thunk_adjust (&bsi,
1657 a, 1, fixed_offset,
1658 virtual_offset));
1659 else
1660 VEC_quick_push (tree, vargs, a);
1767a056 1661 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
28454517 1662 VEC_quick_push (tree, vargs, arg);
1663 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1664 VEC_free (tree, heap, vargs);
28454517 1665 gimple_call_set_from_thunk (call, true);
1666 if (restmp)
1667 gimple_call_set_lhs (call, restmp);
1668 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1669 mark_symbols_for_renaming (call);
1670 find_referenced_vars_in (call);
1671 update_stmt (call);
1672
1673 if (restmp && !this_adjusting)
1674 {
57ab8ec3 1675 tree true_label = NULL_TREE;
28454517 1676
1677 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1678 {
1679 gimple stmt;
1680 /* If the return type is a pointer, we need to
1681 protect against NULL. We know there will be an
1682 adjustment, because that's why we're emitting a
1683 thunk. */
1684 then_bb = create_basic_block (NULL, (void *) 0, bb);
1685 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1686 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1687 remove_edge (single_succ_edge (bb));
1688 true_label = gimple_block_label (then_bb);
28454517 1689 stmt = gimple_build_cond (NE_EXPR, restmp,
385f3f36 1690 build_zero_cst (TREE_TYPE (restmp)),
28454517 1691 NULL_TREE, NULL_TREE);
1692 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1693 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1694 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1695 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1696 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1697 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1698 bsi = gsi_last_bb (then_bb);
1699 }
1700
1701 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1702 fixed_offset, virtual_offset);
1703 if (true_label)
1704 {
1705 gimple stmt;
1706 bsi = gsi_last_bb (else_bb);
385f3f36 1707 stmt = gimple_build_assign (restmp,
1708 build_zero_cst (TREE_TYPE (restmp)));
28454517 1709 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1710 bsi = gsi_last_bb (return_bb);
1711 }
1712 }
1713 else
1714 gimple_call_set_tail (call, true);
1715
1716 /* Build return value. */
1717 ret = gimple_build_return (restmp);
1718 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1719
1720 delete_unreachable_blocks ();
1721 update_ssa (TODO_update_ssa);
1722
28454517 1723 /* Since we want to emit the thunk, we explicitly mark its name as
1724 referenced. */
91bf9d9a 1725 node->thunk.thunk_p = false;
1726 cgraph_node_remove_callees (node);
28454517 1727 cgraph_add_new_function (thunk_fndecl, true);
1728 bitmap_obstack_release (NULL);
1729 }
1730 current_function_decl = NULL;
1731}
1732
91bf9d9a 1733
c70f46b0 1734
1735/* Assemble thunks and aliases associated with NODE. */
91bf9d9a 1736
1737static void
c70f46b0 1738assemble_thunks_and_aliases (struct cgraph_node *node)
91bf9d9a 1739{
1740 struct cgraph_edge *e;
c70f46b0 1741 int i;
1742 struct ipa_ref *ref;
1743
91bf9d9a 1744 for (e = node->callers; e;)
1745 if (e->caller->thunk.thunk_p)
1746 {
1747 struct cgraph_node *thunk = e->caller;
1748
1749 e = e->next_caller;
c70f46b0 1750 assemble_thunks_and_aliases (thunk);
91bf9d9a 1751 assemble_thunk (thunk);
1752 }
1753 else
1754 e = e->next_caller;
c70f46b0 1755 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1756 if (ref->use == IPA_REF_ALIAS)
1757 {
1758 struct cgraph_node *alias = ipa_ref_refering_node (ref);
968b8c52 1759 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1760
1761 /* Force assemble_alias to really output the alias this time instead
1762 of buffering it among the pending alias pairs. */
1763 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
c70f46b0 1764 assemble_alias (alias->decl,
1765 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1766 assemble_thunks_and_aliases (alias);
968b8c52 1767 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
c70f46b0 1768 }
91bf9d9a 1769}
1770
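/* Illustration (not part of cgraphunit.c): C source that creates the kind of
   alias reference assemble_thunks_and_aliases emits through assemble_alias
   above.  The identifiers are made up for the example.  */
void
real_implementation (void)
{
  /* Body of the real function.  */
}

/* second_name becomes an alias node referring to real_implementation and is
   assembled right after its target, as described above.  */
void second_name (void) __attribute__ ((alias ("real_implementation")));
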
ae01b312 1771/* Expand function specified by NODE. */
e6d2b2d8 1772
ae01b312 1773static void
d9d9733a 1774cgraph_expand_function (struct cgraph_node *node)
ae01b312 1775{
1776 tree decl = node->decl;
1777
b0cdf642 1778 /* We ought to not compile any inline clones. */
cc636d56 1779 gcc_assert (!node->global.inlined_to);
b0cdf642 1780
6329636b 1781 announce_function (decl);
09fc9532 1782 node->process = 0;
f7777314 1783 gcc_assert (node->lowered);
1784
1785 /* Generate RTL for the body of DECL. */
1786 tree_rest_of_compilation (decl);
1787
1788 /* Make sure that BE didn't give up on compiling. */
1789 gcc_assert (TREE_ASM_WRITTEN (decl));
1790 current_function_decl = NULL;
cc91b414 1791 gcc_assert (!cgraph_preserve_function_body_p (node));
f76f7453 1792
1793 /* It would make a lot more sense to output thunks before the function body to
1794 get more forward and fewer backward jumps. That, however, would require
1795 solving a problem with comdats. See PR48668. Also, aliases must come after
1796 the function itself to keep one-pass assemblers, like the one on AIX, happy.
1797 See PR 50689. FIXME: Perhaps thunks should be moved before the function IFF
1798 they are not in comdat groups. */
1799 assemble_thunks_and_aliases (node);
1a1a827a 1800 cgraph_release_function_body (node);
1801 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1802 points to the dead function body. */
1803 cgraph_node_remove_callees (node);
e1be32b8 1804
1805 cgraph_function_flags_ready = true;
ae01b312 1806}
1807
b0cdf642 1808/* Return true when edge E may be inlined into its caller; *REASON is set to the recorded inline failure code. */
d7c6d889 1809
1810bool
326a9581 1811cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1812{
b0cdf642 1813 *reason = e->inline_failed;
1814 return !e->inline_failed;
d7c6d889 1815}
b0cdf642 1816
acc70efa 1817
acc70efa 1818
d9d9733a 1819/* Expand all functions that must be output.
1820
d7c6d889 1821 Attempt to topologically sort the nodes so that a function is output
1822 only when all functions it calls have already been assembled, allowing
91c82c20 1823 data to be propagated across the callgraph. Use a stack to keep the
3927afe0 1824 distance between a function and its callees small (later we may choose
d7c6d889 1825 to use a more sophisticated algorithm for function reordering; we will
1826 likely want to use subsections to make the output functions appear in
1827 top-down order). */
1828
1829static void
a6868229 1830cgraph_expand_all_functions (void)
d7c6d889 1831{
1832 struct cgraph_node *node;
4c36ffe6 1833 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1834 int order_pos, new_order_pos = 0;
d7c6d889 1835 int i;
1836
7771d558 1837 order_pos = ipa_reverse_postorder (order);
cc636d56 1838 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1839
7bd28bba 1840 /* The garbage collector may remove inline clones we eliminated during
b0cdf642 1841 optimization, so we must be sure not to reference them. */
1842 for (i = 0; i < order_pos; i++)
09fc9532 1843 if (order[i]->process)
b0cdf642 1844 order[new_order_pos++] = order[i];
1845
1846 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1847 {
1848 node = order[i];
09fc9532 1849 if (node->process)
d7c6d889 1850 {
cc636d56 1851 gcc_assert (node->reachable);
09fc9532 1852 node->process = 0;
d7c6d889 1853 cgraph_expand_function (node);
1854 }
1855 }
523c1122 1856 cgraph_process_new_functions ();
773c5ba7 1857
d7c6d889 1858 free (order);
773c5ba7 1859
d7c6d889 1860}
1861
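/* Illustration (not part of cgraphunit.c, and not GCC's ipa_reverse_postorder):
   a generic depth-first postorder over a tiny call graph.  A function is
   recorded only after every function it calls, so emitting in this order
   assembles callees before their callers; the loop above achieves the same
   effect by walking a reverse-postorder array from its end.  All names below
   are hypothetical.  */
#include <stdio.h>

#define N_FUNCS 4

/* calls[i][j] != 0 means function i calls function j:
   main(0) calls f(1) and g(2); f and g both call h(3).  */
static const int calls[N_FUNCS][N_FUNCS] = {
  { 0, 1, 1, 0 },
  { 0, 0, 0, 1 },
  { 0, 0, 0, 1 },
  { 0, 0, 0, 0 }
};

static int visited[N_FUNCS];
static int emit_order[N_FUNCS];
static int n_emitted;

static void
dfs (int u)
{
  int v;

  visited[u] = 1;
  for (v = 0; v < N_FUNCS; v++)
    if (calls[u][v] && !visited[v])
      dfs (v);
  /* U is recorded only after everything it calls.  */
  emit_order[n_emitted++] = u;
}

int
main (void)
{
  int u;

  for (u = 0; u < N_FUNCS; u++)
    if (!visited[u])
      dfs (u);
  for (u = 0; u < N_FUNCS; u++)
    printf ("emit function %d\n", emit_order[u]);  /* Prints 3 1 2 0.  */
  return 0;
}
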
56af936e 1862/* This is used to sort the node types by the cgraph order number. */
1863
0b09525f 1864enum cgraph_order_sort_kind
1865{
1866 ORDER_UNDEFINED = 0,
1867 ORDER_FUNCTION,
1868 ORDER_VAR,
1869 ORDER_ASM
1870};
1871
56af936e 1872struct cgraph_order_sort
1873{
0b09525f 1874 enum cgraph_order_sort_kind kind;
56af936e 1875 union
1876 {
1877 struct cgraph_node *f;
1d416bd7 1878 struct varpool_node *v;
56af936e 1879 struct cgraph_asm_node *a;
1880 } u;
1881};
1882
1883/* Output all functions, variables, and asm statements according to their
1884 order fields, which record the order in which they appeared in the
1885 source file. This implements -fno-toplevel-reorder. In this mode we
1886 may output functions and variables which don't really need to be
1887 output. */
1888
1889static void
1890cgraph_output_in_order (void)
1891{
1892 int max;
56af936e 1893 struct cgraph_order_sort *nodes;
1894 int i;
1895 struct cgraph_node *pf;
1d416bd7 1896 struct varpool_node *pv;
56af936e 1897 struct cgraph_asm_node *pa;
1898
1899 max = cgraph_order;
3e1cde87 1900 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1901
1d416bd7 1902 varpool_analyze_pending_decls ();
56af936e 1903
1904 for (pf = cgraph_nodes; pf; pf = pf->next)
1905 {
c70f46b0 1906 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
56af936e 1907 {
1908 i = pf->order;
1909 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1910 nodes[i].kind = ORDER_FUNCTION;
1911 nodes[i].u.f = pf;
1912 }
1913 }
1914
1d416bd7 1915 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1916 {
1917 i = pv->order;
1918 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1919 nodes[i].kind = ORDER_VAR;
1920 nodes[i].u.v = pv;
1921 }
1922
1923 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1924 {
1925 i = pa->order;
1926 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1927 nodes[i].kind = ORDER_ASM;
1928 nodes[i].u.a = pa;
1929 }
56af936e 1930
304e5318 1931 /* In this (-fno-toplevel-reorder) mode we output all statics; mark them as needed. */
1932 for (i = 0; i < max; ++i)
1933 {
1934 if (nodes[i].kind == ORDER_VAR)
1935 {
1936 varpool_mark_needed_node (nodes[i].u.v);
1937 }
1938 }
1939 varpool_empty_needed_queue ();
1940
91da0f1c 1941 for (i = 0; i < max; ++i)
1942 if (nodes[i].kind == ORDER_VAR)
1943 varpool_finalize_named_section_flags (nodes[i].u.v);
1944
56af936e 1945 for (i = 0; i < max; ++i)
1946 {
1947 switch (nodes[i].kind)
1948 {
1949 case ORDER_FUNCTION:
09fc9532 1950 nodes[i].u.f->process = 0;
56af936e 1951 cgraph_expand_function (nodes[i].u.f);
1952 break;
1953
1954 case ORDER_VAR:
1d416bd7 1955 varpool_assemble_decl (nodes[i].u.v);
56af936e 1956 break;
1957
1958 case ORDER_ASM:
1959 assemble_asm (nodes[i].u.a->asm_str);
1960 break;
1961
1962 case ORDER_UNDEFINED:
1963 break;
1964
1965 default:
1966 gcc_unreachable ();
1967 }
1968 }
4b4ea2db 1969
1970 cgraph_asm_nodes = NULL;
3e1cde87 1971 free (nodes);
56af936e 1972}
1973
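/* Illustration (not part of cgraphunit.c): with -fno-toplevel-reorder the
   three top-level entities below are emitted in exactly this source order
   (ORDER_VAR, ORDER_ASM, ORDER_FUNCTION), even if some of them would not
   strictly need to be output.  The identifiers are made up.  */
static int emitted_first = 1;

__asm__ (".globl marker_between\nmarker_between:");

static int
emitted_last (void)
{
  return emitted_first;
}
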
b0cdf642 1974/* Return true when the function body of NODE still needs to be kept around
1975 for later re-use. */
1976bool
cc91b414 1977cgraph_preserve_function_body_p (struct cgraph_node *node)
b0cdf642 1978{
8d8c4c8d 1979 gcc_assert (cgraph_global_info_ready);
c70f46b0 1980 gcc_assert (!node->alias && !node->thunk.thunk_p);
cc91b414 1981
b0cdf642 1982 /* Look if there is any clone around. */
ccf4ab6b 1983 if (node->clones)
1984 return true;
b0cdf642 1985 return false;
1986}
1987
77fce4cd 1988static void
1989ipa_passes (void)
1990{
87d4aa85 1991 set_cfun (NULL);
4b14adf9 1992 current_function_decl = NULL;
75a70cf9 1993 gimple_register_cfg_hooks ();
77fce4cd 1994 bitmap_obstack_initialize (NULL);
59dd4830 1995
c9036234 1996 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1997
59dd4830 1998 if (!in_lto_p)
7b2e8956 1999 {
2000 execute_ipa_pass_list (all_small_ipa_passes);
2001 if (seen_error ())
2002 return;
2003 }
9ed5b1f5 2004
941125aa 2005 /* We never run removal of unreachable nodes after early passes. This is
2006 because TODO is run before the subpasses. It is important to remove
2007 the unreachable functions to save work at the IPA level and to get the
2008 LTO symbol tables right. */
2009 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
2010
7bfefa9d 2011 /* If pass_all_early_optimizations was not scheduled, the state of
2012 the cgraph will not be properly updated. Update it now. */
2013 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2014 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 2015
7bfefa9d 2016 if (!in_lto_p)
2017 {
2018 /* Generate coverage variables and constructors. */
2019 coverage_finish ();
2020
2021 /* Process new functions added. */
2022 set_cfun (NULL);
2023 current_function_decl = NULL;
2024 cgraph_process_new_functions ();
7bfefa9d 2025
c9036234 2026 execute_ipa_summary_passes
2027 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 2028 }
23433d72 2029
2030 /* Some targets need to handle LTO assembler output specially. */
2031 if (flag_generate_lto)
2032 targetm.asm_out.lto_start ();
2033
7bfefa9d 2034 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
2035
2036 if (!in_lto_p)
2037 ipa_write_summaries ();
2038
23433d72 2039 if (flag_generate_lto)
2040 targetm.asm_out.lto_end ();
2041
b33542ab 2042 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
8867b500 2043 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 2044 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 2045
77fce4cd 2046 bitmap_obstack_release (NULL);
2047}
2048
badeded8 2049
2050/* Return, as an identifier, the symbol name given by DECL's "alias" attribute. */
2051
2052static tree
2053get_alias_symbol (tree decl)
2054{
2055 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2056 return get_identifier (TREE_STRING_POINTER
2057 (TREE_VALUE (TREE_VALUE (alias))));
2058}
2059
2060
5e712541 2061/* Weakrefs may be associated with external decls and thus not output
2062 at expansion time. Emit all necessary aliases. */
2063
5139ff04 2064static void
5e712541 2065output_weakrefs (void)
2066{
2067 struct cgraph_node *node;
2068 struct varpool_node *vnode;
2069 for (node = cgraph_nodes; node; node = node->next)
badeded8 2070 if (node->alias && DECL_EXTERNAL (node->decl)
892b9268 2071 && !TREE_ASM_WRITTEN (node->decl)
2072 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
5e712541 2073 assemble_alias (node->decl,
badeded8 2074 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
2075 : get_alias_symbol (node->decl));
5e712541 2076 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
badeded8 2077 if (vnode->alias && DECL_EXTERNAL (vnode->decl)
892b9268 2078 && !TREE_ASM_WRITTEN (vnode->decl)
2079 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->decl)))
5e712541 2080 assemble_alias (vnode->decl,
badeded8 2081 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
2082 : get_alias_symbol (vnode->decl));
5e712541 2083}
2084
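/* Illustration (not part of cgraphunit.c): C source producing the kind of
   weakref alias that output_weakrefs must emit -- the target is external,
   so nothing is written for it at expansion time.  Names are made up.  */
static void optional_hook (void) __attribute__ ((weakref ("external_hook")));

int
call_hook_if_present (void)
{
  if (optional_hook)  /* Resolves to NULL when external_hook is absent.  */
    {
      optional_hook ();
      return 1;
    }
  return 0;
}
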
34e5cced 2085
34e5cced 2086
121f3051 2087void
2088init_cgraph (void)
2089{
01ec0a6c 2090 if (!cgraph_dump_file)
2091 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 2092}
b5d36404 2093
a0c938f0 2094/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2095 fixed up by cgraph_function_versioning (); now the call statements in
2096 their respective caller bodies must be updated to call NEW_VERSION. */
2097
2098static void
2099update_call_expr (struct cgraph_node *new_version)
2100{
2101 struct cgraph_edge *e;
2102
2103 gcc_assert (new_version);
75a70cf9 2104
2105 /* Update the call expr on the edges to call the new version. */
b5d36404 2106 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2107 {
2108 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2109 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2110 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2111 }
b5d36404 2112}
2113
2114
2115/* Create a new cgraph node which is the new version of
2116 the OLD_VERSION node. REDIRECT_CALLERS holds the caller
2117 edges which should be redirected to point to
2118 NEW_VERSION. All the callee edges of OLD_VERSION
2119 are cloned to the new version node. Return the new
b06ab5fa 2120 version node.
2121
2122 If non-NULL, BBS_TO_COPY determines which basic blocks
2123 are copied, to prevent duplication of calls that are dead
2124 in the clone. */
b5d36404 2125
4c0315d0 2126struct cgraph_node *
b5d36404 2127cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2128 tree new_decl,
b06ab5fa 2129 VEC(cgraph_edge_p,heap) *redirect_callers,
2130 bitmap bbs_to_copy)
2131 {
b5d36404 2132 struct cgraph_node *new_version;
32936803 2133 struct cgraph_edge *e;
b5d36404 2134 unsigned i;
2135
2136 gcc_assert (old_version);
a0c938f0 2137
5a90471f 2138 new_version = cgraph_create_node (new_decl);
b5d36404 2139
4c0315d0 2140 new_version->analyzed = old_version->analyzed;
b5d36404 2141 new_version->local = old_version->local;
a70a5e2c 2142 new_version->local.externally_visible = false;
2143 new_version->local.local = true;
b5d36404 2144 new_version->global = old_version->global;
a93f1c3b 2145 new_version->rtl = old_version->rtl;
b5d36404 2146 new_version->reachable = true;
2147 new_version->count = old_version->count;
2148
a70a5e2c 2149 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2150 if (!bbs_to_copy
2151 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2152 cgraph_clone_edge (e, new_version, e->call_stmt,
2153 e->lto_stmt_uid, REG_BR_PROB_BASE,
2154 CGRAPH_FREQ_BASE,
0835ad03 2155 true);
a70a5e2c 2156 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2157 if (!bbs_to_copy
2158 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2159 cgraph_clone_edge (e, new_version, e->call_stmt,
2160 e->lto_stmt_uid, REG_BR_PROB_BASE,
2161 CGRAPH_FREQ_BASE,
0835ad03 2162 true);
48148244 2163 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
4460a647 2164 {
2165 /* Redirect calls to the old version node to point to its new
2166 version. */
2167 cgraph_redirect_edge_callee (e, new_version);
2168 }
b5d36404 2169
ad687a96 2170 cgraph_call_node_duplication_hooks (old_version, new_version);
2171
b5d36404 2172 return new_version;
2173 }
2174
2175 /* Perform function versioning.
a0c938f0 2176 Function versioning includes copying of the tree and
b5d36404 2177 a callgraph update (creating a new cgraph node and updating
2178 its callees and callers).
2179
2180 The REDIRECT_CALLERS vector includes the edges to be redirected
2181 to the new version.
2182
2183 TREE_MAP is a mapping of tree nodes we want to replace with
2184 new ones (according to results of prior analysis).
2185 OLD_VERSION_NODE is the node that is versioned.
7a3ec978 2186
b06ab5fa 2187 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2188 from the new version.
7a3ec978 2189 If SKIP_RETURN is true, the new version will return void.
b06ab5fa 2190 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
7a3ec978 2191 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
2192
2193 Return the new version's cgraph node. */
b5d36404 2194
2195struct cgraph_node *
2196cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2197 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2198 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2199 bitmap args_to_skip,
7a3ec978 2200 bool skip_return,
b06ab5fa 2201 bitmap bbs_to_copy,
2202 basic_block new_entry_block,
a70a5e2c 2203 const char *clone_name)
b5d36404 2204{
2205 tree old_decl = old_version_node->decl;
2206 struct cgraph_node *new_version_node = NULL;
2207 tree new_decl;
2208
2209 if (!tree_versionable_function_p (old_decl))
2210 return NULL;
2211
3c97c75d 2212 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2213
7a3ec978 2214 /* Make a new FUNCTION_DECL tree node for the new version. */
2215 if (!args_to_skip && !skip_return)
5afe38fe 2216 new_decl = copy_node (old_decl);
2217 else
7a3ec978 2218 new_decl
2219 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
b5d36404 2220
df0b8dfb 2221 /* Generate a new name for the new version. */
2222 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2223 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2224 SET_DECL_RTL (new_decl, NULL);
2225
e54aa8a4 2226 /* When the old decl was a con-/destructor make sure the clone isn't. */
2227 DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
2228 DECL_STATIC_DESTRUCTOR(new_decl) = 0;
2229
b5d36404 2230 /* Create the new version's call-graph node.
2231 and update the edges of the new node. */
2232 new_version_node =
2233 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2234 redirect_callers, bbs_to_copy);
b5d36404 2235
2236 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2237 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
7a3ec978 2238 skip_return, bbs_to_copy, new_entry_block);
b5d36404 2239
a0c938f0 2240 /* Update the new version's properties.
e03a95e7 2241 Make the new version visible only within this translation unit. Make sure
2242 that it is not weak either.
a0c938f0 2243 ??? We cannot use COMDAT linkage because there is no
b5d36404 2244 ABI support for this. */
6137cc9f 2245 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2246 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2247 new_version_node->local.externally_visible = 0;
2248 new_version_node->local.local = 1;
2249 new_version_node->lowered = true;
f014e39d 2250
e03a95e7 2251 /* Update the call_expr on the edges to call the new version node. */
2252 update_call_expr (new_version_node);
48e1416a 2253
50828ed8 2254 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2255 return new_version_node;
2256}
469679ab 2257
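/* Illustration (not part of cgraphunit.c): what a versioned clone amounts to
   at the source level.  If TREE_MAP replaces parameter SCALE with the
   constant 8 and ARGS_TO_SKIP removes that parameter from the signature,
   the clone of the first function below behaves like the second.  This is a
   hand-written, hypothetical example; GCC performs the transformation on the
   intermediate representation, not on C source.  */
static int
compute (int scale, int x)
{
  return scale * x + 1;
}

static int
compute_clone_for_scale_8 (int x)
{
  return 8 * x + 1;
}
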
ccf4ab6b 2258/* Given a virtual clone, turn it into an actual clone. */
2259static void
2260cgraph_materialize_clone (struct cgraph_node *node)
2261{
2262 bitmap_obstack_initialize (NULL);
e748b31d 2263 node->former_clone_of = node->clone_of->decl;
2264 if (node->clone_of->former_clone_of)
2265 node->former_clone_of = node->clone_of->former_clone_of;
ccf4ab6b 2266 /* Copy the function body of NODE->clone_of to NODE, applying its tree_map. */
2267 tree_function_versioning (node->clone_of->decl, node->decl,
2268 node->clone.tree_map, true,
7a3ec978 2269 node->clone.args_to_skip, false,
2270 NULL, NULL);
e20422ea 2271 if (cgraph_dump_file)
2272 {
2273 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2274 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2275 }
ccf4ab6b 2276
2277 /* Function is no longer clone. */
2278 if (node->next_sibling_clone)
2279 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2280 if (node->prev_sibling_clone)
2281 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2282 else
2283 node->clone_of->clones = node->next_sibling_clone;
2284 node->next_sibling_clone = NULL;
2285 node->prev_sibling_clone = NULL;
6d1cc52c 2286 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2287 {
2288 cgraph_release_function_body (node->clone_of);
2289 cgraph_node_remove_callees (node->clone_of);
2290 ipa_remove_all_references (&node->clone_of->ref_list);
2291 }
ccf4ab6b 2292 node->clone_of = NULL;
2293 bitmap_obstack_release (NULL);
2294}
2295
c596d830 2296/* If necessary, change the function declaration in the call statement
2297 associated with E so that it corresponds to the edge callee. */
2298
2299gimple
2300cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2301{
2302 tree decl = gimple_call_fndecl (e->call_stmt);
2303 gimple new_stmt;
3fd0ca33 2304 gimple_stmt_iterator gsi;
1f449108 2305#ifdef ENABLE_CHECKING
2306 struct cgraph_node *node;
2307#endif
c596d830 2308
1caef38b 2309 if (e->indirect_unknown_callee
0a31490e 2310 || decl == e->callee->decl)
c596d830 2311 return e->call_stmt;
2312
1f449108 2313#ifdef ENABLE_CHECKING
1caef38b 2314 if (decl)
2315 {
2316 node = cgraph_get_node (decl);
2317 gcc_assert (!node || !node->clone.combined_args_to_skip);
2318 }
1f449108 2319#endif
e748b31d 2320
c596d830 2321 if (cgraph_dump_file)
2322 {
2323 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2324 cgraph_node_name (e->caller), e->caller->uid,
2325 cgraph_node_name (e->callee), e->callee->uid);
2326 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2327 if (e->callee->clone.combined_args_to_skip)
91aba934 2328 {
2329 fprintf (cgraph_dump_file, " combined args to skip: ");
2330 dump_bitmap (cgraph_dump_file,
2331 e->callee->clone.combined_args_to_skip);
e748b31d 2332 }
c596d830 2333 }
2334
2335 if (e->callee->clone.combined_args_to_skip)
91aba934 2336 {
092cd838 2337 int lp_nr;
91aba934 2338
2339 new_stmt
2340 = gimple_call_copy_skip_args (e->call_stmt,
2341 e->callee->clone.combined_args_to_skip);
75c7f5a5 2342 gimple_call_set_fndecl (new_stmt, e->callee->decl);
91aba934 2343
2344 if (gimple_vdef (new_stmt)
2345 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2346 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2347
d4e80e2b 2348 gsi = gsi_for_stmt (e->call_stmt);
9126b675 2349 gsi_replace (&gsi, new_stmt, false);
092cd838 2350 /* We need to defer cleaning EH info on the new statement to
2351 fixup-cfg. We may not have dominator information at this point
2352 and thus would end up with unreachable blocks and have no way
2353 to communicate that we need to run CFG cleanup then. */
2354 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2355 if (lp_nr != 0)
2356 {
2357 remove_stmt_from_eh_lp (e->call_stmt);
2358 add_stmt_to_eh_lp (new_stmt, lp_nr);
2359 }
91aba934 2360 }
c596d830 2361 else
75c7f5a5 2362 {
2363 new_stmt = e->call_stmt;
2364 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2365 update_stmt (new_stmt);
2366 }
c596d830 2367
c596d830 2368 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2369
2370 if (cgraph_dump_file)
2371 {
2372 fprintf (cgraph_dump_file, " updated to:");
2373 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2374 }
2375 return new_stmt;
2376}
2377
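/* Illustration (not part of cgraphunit.c): the source-level effect of the
   call-statement rewriting above when the callee clone skips an argument.
   The callees are hypothetical stand-ins for an original function and its
   clone; GCC performs the rewrite on GIMPLE call statements.  */
static int orig_fn (int scale, int x) { return scale * x + 1; }
static int orig_fn_clone_8 (int x) { return 8 * x + 1; }

static int
call_site_before (int y)
{
  return orig_fn (8, y);         /* Calls the original fndecl.  */
}

static int
call_site_after (int y)
{
  return orig_fn_clone_8 (y);    /* Fndecl replaced, argument 0 dropped.  */
}
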
ccf4ab6b 2378/* Once all functions from the compilation unit are in memory, produce all
c596d830 2379 clones and update all calls. We might also do this on demand if we don't
2380 want to bring all functions to memory prior to compilation, but the current
2381 WHOPR implementation does that and it is a bit easier to keep everything
2382 right in this order. */
d2bb3f9d 2383static void
ccf4ab6b 2384cgraph_materialize_all_clones (void)
2385{
2386 struct cgraph_node *node;
2387 bool stabilized = false;
2388
2389 if (cgraph_dump_file)
2390 fprintf (cgraph_dump_file, "Materializing clones\n");
2391#ifdef ENABLE_CHECKING
2392 verify_cgraph ();
2393#endif
2394
2395 /* We could also use topological order, but the number of iterations should
2396 be bounded by the number of IPA passes, since a single IPA pass is unlikely
2397 to create clones of clones it created itself. */
2398 while (!stabilized)
2399 {
2400 stabilized = true;
2401 for (node = cgraph_nodes; node; node = node->next)
2402 {
2403 if (node->clone_of && node->decl != node->clone_of->decl
2404 && !gimple_has_body_p (node->decl))
2405 {
2406 if (gimple_has_body_p (node->clone_of->decl))
2407 {
2408 if (cgraph_dump_file)
e20422ea 2409 {
0a10fd82 2410 fprintf (cgraph_dump_file, "cloning %s to %s\n",
e20422ea 2411 cgraph_node_name (node->clone_of),
2412 cgraph_node_name (node));
2413 if (node->clone.tree_map)
2414 {
2415 unsigned int i;
2416 fprintf (cgraph_dump_file, " replace map: ");
2417 for (i = 0; i < VEC_length (ipa_replace_map_p,
2418 node->clone.tree_map);
2419 i++)
2420 {
2421 struct ipa_replace_map *replace_info;
2422 replace_info = VEC_index (ipa_replace_map_p,
2423 node->clone.tree_map,
2424 i);
2425 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2426 fprintf (cgraph_dump_file, " -> ");
2427 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2428 fprintf (cgraph_dump_file, "%s%s;",
2429 replace_info->replace_p ? "(replace)":"",
2430 replace_info->ref_p ? "(ref)":"");
2431 }
2432 fprintf (cgraph_dump_file, "\n");
2433 }
2434 if (node->clone.args_to_skip)
2435 {
2436 fprintf (cgraph_dump_file, " args_to_skip: ");
2437 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2438 }
2439 if (node->clone.args_to_skip)
2440 {
2441 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2442 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2443 }
2444 }
ccf4ab6b 2445 cgraph_materialize_clone (node);
a510bd8d 2446 stabilized = false;
ccf4ab6b 2447 }
ccf4ab6b 2448 }
2449 }
2450 }
ee3f5fc0 2451 for (node = cgraph_nodes; node; node = node->next)
2452 if (!node->analyzed && node->callees)
2453 cgraph_node_remove_callees (node);
c596d830 2454 if (cgraph_dump_file)
2455 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2456#ifdef ENABLE_CHECKING
2457 verify_cgraph ();
2458#endif
ccf4ab6b 2459 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2460}
2461
d2bb3f9d 2462
2463/* Perform simple optimizations based on callgraph. */
2464
2465void
2466cgraph_optimize (void)
2467{
2468 if (seen_error ())
2469 return;
2470
2471#ifdef ENABLE_CHECKING
2472 verify_cgraph ();
2473#endif
2474
2475 /* The frontend may output common variables after the unit has been finalized.
2476 It is safe to deal with them here as they are always zero-initialized. */
2477 varpool_analyze_pending_decls ();
2478
2479 timevar_push (TV_CGRAPHOPT);
2480 if (pre_ipa_mem_report)
2481 {
2482 fprintf (stderr, "Memory consumption before IPA\n");
2483 dump_memory_report (false);
2484 }
2485 if (!quiet_flag)
2486 fprintf (stderr, "Performing interprocedural optimizations\n");
2487 cgraph_state = CGRAPH_STATE_IPA;
2488
2489 /* Don't run the IPA passes if there was any error or sorry messages. */
2490 if (!seen_error ())
2491 ipa_passes ();
2492
2493 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2494 if (seen_error ()
2495 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2496 {
2497 timevar_pop (TV_CGRAPHOPT);
2498 return;
2499 }
2500
2501 /* This pass removes bodies of extern inline functions we never inlined.
2502 Do this later so other IPA passes see what is really going on. */
2503 cgraph_remove_unreachable_nodes (false, dump_file);
2504 cgraph_global_info_ready = true;
2505 if (cgraph_dump_file)
2506 {
2507 fprintf (cgraph_dump_file, "Optimized ");
2508 dump_cgraph (cgraph_dump_file);
2509 dump_varpool (cgraph_dump_file);
2510 }
2511 if (post_ipa_mem_report)
2512 {
2513 fprintf (stderr, "Memory consumption after IPA\n");
2514 dump_memory_report (false);
2515 }
2516 timevar_pop (TV_CGRAPHOPT);
2517
2518 /* Output everything. */
2519 (*debug_hooks->assembly_start) ();
2520 if (!quiet_flag)
2521 fprintf (stderr, "Assembling functions:\n");
2522#ifdef ENABLE_CHECKING
2523 verify_cgraph ();
2524#endif
2525
2526 cgraph_materialize_all_clones ();
2527 bitmap_obstack_initialize (NULL);
2528 execute_ipa_pass_list (all_late_ipa_passes);
2529 cgraph_remove_unreachable_nodes (true, dump_file);
2530#ifdef ENABLE_CHECKING
2531 verify_cgraph ();
2532#endif
2533 bitmap_obstack_release (NULL);
2534 cgraph_mark_functions_to_output ();
2535 output_weakrefs ();
2536
2537 cgraph_state = CGRAPH_STATE_EXPANSION;
2538 if (!flag_toplevel_reorder)
2539 cgraph_output_in_order ();
2540 else
2541 {
2542 cgraph_output_pending_asms ();
2543
2544 cgraph_expand_all_functions ();
2545 varpool_remove_unreferenced_decls ();
2546
2547 varpool_assemble_pending_decls ();
2548 }
2549
2550 cgraph_process_new_functions ();
2551 cgraph_state = CGRAPH_STATE_FINISHED;
2552
2553 if (cgraph_dump_file)
2554 {
2555 fprintf (cgraph_dump_file, "\nFinal ");
2556 dump_cgraph (cgraph_dump_file);
2557 dump_varpool (cgraph_dump_file);
2558 }
2559#ifdef ENABLE_CHECKING
2560 verify_cgraph ();
2561 /* Double check that all inline clones are gone and that all
2562 function bodies have been released from memory. */
2563 if (!seen_error ())
2564 {
2565 struct cgraph_node *node;
2566 bool error_found = false;
2567
2568 for (node = cgraph_nodes; node; node = node->next)
2569 if (node->analyzed
2570 && (node->global.inlined_to
2571 || gimple_has_body_p (node->decl)))
2572 {
2573 error_found = true;
2574 dump_cgraph_node (stderr, node);
2575 }
2576 if (error_found)
2577 internal_error ("nodes with unreleased memory found");
2578 }
2579#endif
2580}
2581
2582
2583/* Analyze the whole compilation unit once it is parsed completely. */
2584
2585void
2586cgraph_finalize_compilation_unit (void)
2587{
2588 timevar_push (TV_CGRAPH);
2589
2590 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2591 if (flag_lto)
2592 lto_streamer_hooks_init ();
2593
2594 /* If we're here there's no current function anymore. Some frontends
2595 are lazy in clearing these. */
2596 current_function_decl = NULL;
2597 set_cfun (NULL);
2598
2599 /* Do not skip analyzing the functions if there were errors; otherwise we
2600 would miss diagnostics for the following functions. */
2601
2602 /* Emit size functions we didn't inline. */
2603 finalize_size_functions ();
2604
2605 /* Mark alias targets necessary and emit diagnostics. */
2606 finish_aliases_1 ();
2607 handle_alias_pairs ();
2608
2609 if (!quiet_flag)
2610 {
2611 fprintf (stderr, "\nAnalyzing compilation unit\n");
2612 fflush (stderr);
2613 }
2614
2615 if (flag_dump_passes)
2616 dump_passes ();
2617
2618 /* Gimplify and lower all functions, compute reachability and
2619 remove unreachable nodes. */
2620 cgraph_analyze_functions ();
2621
2622 /* Mark alias targets necessary and emit diagnostics. */
2623 finish_aliases_1 ();
2624 handle_alias_pairs ();
2625
2626 /* Gimplify and lower thunks. */
2627 cgraph_analyze_functions ();
2628
2629 /* Finally drive the pass manager. */
2630 cgraph_optimize ();
2631
2632 timevar_pop (TV_CGRAPH);
2633}
2634
2635
a861fe52 2636#include "gt-cgraphunit.h"