]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
gimplify.c (gimplify_modify_expr): When assigning to volatiles, copy the src value...
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
a418679d 1/* Callgraph based interprocedural optimizations.
566f27e4 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
cac67c08 3 Free Software Foundation, Inc.
1c4a429a
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1c4a429a
JH
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
1c4a429a 21
18c6ada9 22/* This module implements main driver of compilation process as well as
a418679d 23 few basic interprocedural optimizers.
18c6ada9
JH
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
efe75b6f
JH
35 (There is one exception needed for implementing GCC extern inline
36 function.)
18c6ada9 37
8a4a83ed 38 - varpool_finalize_variable
18c6ada9 39
1ae58c30 40 This function has same behavior as the above but is used for static
18c6ada9
JH
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
efe75b6f
JH
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
18c6ada9 47
7e8b322a 48 In the the call-graph construction and local function
18c6ada9
JH
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
efe75b6f
JH
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
18c6ada9
JH
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
18c6ada9 62 - cgraph_mark_needed_node
8a4a83ed 63 - varpool_mark_needed_node
18c6ada9 64
efe75b6f
JH
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
dbb23ff7 69 used by C++ frontend to explicitly mark the keyed methods.
18c6ada9
JH
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
7e8b322a 80 Analyzing of all functions is deferred
18c6ada9
JH
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
e1990f69 91 The intra-procedural information is produced and its existence
18c6ada9
JH
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that has not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
7e8b322a 106 where reference has been optimized out. */
9b3e897d 107
6674a6ce 108
1c4a429a
JH
109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
c9b9aa64 114#include "rtl.h"
6674a6ce 115#include "tree-flow.h"
1c4a429a
JH
116#include "tree-inline.h"
117#include "langhooks.h"
0c58f841 118#include "pointer-set.h"
1c4a429a
JH
119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
dafc5b82 125#include "diagnostic.h"
cf835838
JM
126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
a194aa56 128#include "timevar.h"
b58b1157
JH
129#include "params.h"
130#include "fibheap.h"
dc0bfe6a 131#include "intl.h"
902edd36 132#include "function.h"
57fb5341 133#include "ipa-prop.h"
726a989a
RB
134#include "gimple.h"
135#include "tree-iterator.h"
b4861090 136#include "tree-pass.h"
a406865a 137#include "tree-dump.h"
cd9c7bd2 138#include "output.h"
3baf459d 139#include "coverage.h"
090fa0ab 140#include "plugin.h"
b58b1157 141
a20af5b8 142static void cgraph_expand_all_functions (void);
db0e878d
AJ
143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
21c4a6a7 145static void cgraph_output_pending_asms (void);
a406865a 146static void cgraph_analyze_function (struct cgraph_node *);
7dff32e6 147
0a5fa5a1 148FILE *cgraph_dump_file;
9b3e897d 149
873c7164
MM
150/* A vector of FUNCTION_DECLs declared as static constructors. */
151static GTY (()) VEC(tree, gc) *static_ctors;
152/* A vector of FUNCTION_DECLs declared as static destructors. */
153static GTY (()) VEC(tree, gc) *static_dtors;
7be82279 154
6744a6ab
JH
155/* Used for vtable lookup in thunk adjusting. */
156static GTY (()) tree vtable_entry_type;
157
7be82279 158/* When target does not have ctors and dtors, we call all constructor
c80b4100 159 and destructor by special initialization/destruction function
b8698a0f
L
160 recognized by collect2.
161
7be82279
JH
162 When we are going to build this function, collect all constructors and
163 destructors and turn them into normal functions. */
164
165static void
166record_cdtor_fn (tree fndecl)
167{
bd85fcee
JH
168 struct cgraph_node *node;
169 if (targetm.have_ctors_dtors
170 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
171 && !DECL_STATIC_DESTRUCTOR (fndecl)))
7be82279
JH
172 return;
173
174 if (DECL_STATIC_CONSTRUCTOR (fndecl))
175 {
873c7164 176 VEC_safe_push (tree, gc, static_ctors, fndecl);
7be82279 177 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
7be82279
JH
178 }
179 if (DECL_STATIC_DESTRUCTOR (fndecl))
180 {
873c7164 181 VEC_safe_push (tree, gc, static_dtors, fndecl);
7be82279 182 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
7be82279 183 }
bd85fcee
JH
184 node = cgraph_node (fndecl);
185 node->local.disregard_inline_limits = 1;
186 cgraph_mark_reachable_node (node);
7be82279
JH
187}
188
873c7164
MM
189/* Define global constructors/destructor functions for the CDTORS, of
190 which they are LEN. The CDTORS are sorted by initialization
191 priority. If CTOR_P is true, these are constructors; otherwise,
192 they are destructors. */
193
7be82279 194static void
873c7164 195build_cdtor (bool ctor_p, tree *cdtors, size_t len)
7be82279 196{
873c7164 197 size_t i;
7be82279 198
873c7164
MM
199 i = 0;
200 while (i < len)
201 {
202 tree body;
203 tree fn;
204 priority_type priority;
205
206 priority = 0;
207 body = NULL_TREE;
208 /* Find the next batch of constructors/destructors with the same
209 initialization priority. */
210 do
211 {
212 priority_type p;
3bb06db4 213 tree call;
873c7164
MM
214 fn = cdtors[i];
215 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
216 if (!body)
217 priority = p;
218 else if (p != priority)
219 break;
3bb06db4
NF
220 call = build_call_expr (fn, 0);
221 append_to_statement_list (call, &body);
873c7164
MM
222 ++i;
223 }
224 while (i < len);
225 gcc_assert (body != NULL_TREE);
226 /* Generate a function to call all the function of like
227 priority. */
228 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
229 }
230}
231
232/* Comparison function for qsort. P1 and P2 are actually of type
233 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
234 used to determine the sort order. */
7be82279 235
873c7164
MM
236static int
237compare_ctor (const void *p1, const void *p2)
238{
239 tree f1;
240 tree f2;
241 int priority1;
242 int priority2;
243
244 f1 = *(const tree *)p1;
245 f2 = *(const tree *)p2;
246 priority1 = DECL_INIT_PRIORITY (f1);
247 priority2 = DECL_INIT_PRIORITY (f2);
b8698a0f 248
873c7164
MM
249 if (priority1 < priority2)
250 return -1;
251 else if (priority1 > priority2)
252 return 1;
253 else
254 /* Ensure a stable sort. */
255 return (const tree *)p1 - (const tree *)p2;
256}
257
258/* Comparison function for qsort. P1 and P2 are actually of type
259 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
260 used to determine the sort order. */
7be82279 261
873c7164
MM
262static int
263compare_dtor (const void *p1, const void *p2)
264{
265 tree f1;
266 tree f2;
267 int priority1;
268 int priority2;
269
270 f1 = *(const tree *)p1;
271 f2 = *(const tree *)p2;
272 priority1 = DECL_FINI_PRIORITY (f1);
273 priority2 = DECL_FINI_PRIORITY (f2);
b8698a0f 274
873c7164
MM
275 if (priority1 < priority2)
276 return -1;
277 else if (priority1 > priority2)
278 return 1;
279 else
280 /* Ensure a stable sort. */
281 return (const tree *)p1 - (const tree *)p2;
7be82279
JH
282}
283
284/* Generate functions to call static constructors and destructors
285 for targets that do not support .ctors/.dtors sections. These
286 functions have magic names which are detected by collect2. */
287
288static void
289cgraph_build_cdtor_fns (void)
290{
873c7164 291 if (!VEC_empty (tree, static_ctors))
7be82279 292 {
873c7164
MM
293 gcc_assert (!targetm.have_ctors_dtors);
294 qsort (VEC_address (tree, static_ctors),
b8698a0f 295 VEC_length (tree, static_ctors),
873c7164
MM
296 sizeof (tree),
297 compare_ctor);
298 build_cdtor (/*ctor_p=*/true,
299 VEC_address (tree, static_ctors),
b8698a0f 300 VEC_length (tree, static_ctors));
873c7164 301 VEC_truncate (tree, static_ctors, 0);
7be82279 302 }
873c7164
MM
303
304 if (!VEC_empty (tree, static_dtors))
7be82279 305 {
873c7164
MM
306 gcc_assert (!targetm.have_ctors_dtors);
307 qsort (VEC_address (tree, static_dtors),
b8698a0f 308 VEC_length (tree, static_dtors),
873c7164
MM
309 sizeof (tree),
310 compare_dtor);
311 build_cdtor (/*ctor_p=*/false,
312 VEC_address (tree, static_dtors),
b8698a0f 313 VEC_length (tree, static_dtors));
873c7164 314 VEC_truncate (tree, static_dtors, 0);
7be82279
JH
315 }
316}
317
8dafba3c
RH
318/* Determine if function DECL is needed. That is, visible to something
319 either outside this translation unit, something magic in the system
7e8b322a 320 configury. */
8dafba3c 321
d7f09764
DN
322bool
323cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
8dafba3c 324{
e7d6beb0 325 /* If the user told us it is used, then it must be so. */
386b46cf
JH
326 if (node->local.externally_visible)
327 return true;
328
e7d6beb0
JH
329 /* ??? If the assembler name is set by hand, it is possible to assemble
330 the name later after finalizing the function and the fact is noticed
331 in assemble_name then. This is arguably a bug. */
332 if (DECL_ASSEMBLER_NAME_SET_P (decl)
333 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
334 return true;
335
a1d31187
JH
336 /* With -fkeep-inline-functions we are keeping all inline functions except
337 for extern inline ones. */
338 if (flag_keep_inline_functions
339 && DECL_DECLARED_INLINE_P (decl)
b521dcbe
JH
340 && !DECL_EXTERNAL (decl)
341 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
a1d31187
JH
342 return true;
343
8dafba3c
RH
344 /* If we decided it was needed before, but at the time we didn't have
345 the body of the function available, then it's still needed. We have
346 to go back and re-check its dependencies now. */
347 if (node->needed)
348 return true;
349
350 /* Externally visible functions must be output. The exception is
c22cacf3 351 COMDAT functions that must be output only when they are needed.
04f77d0f
JH
352
353 When not optimizing, also output the static functions. (see
46f5f7f2 354 PR24561), but don't do so for always_inline functions, functions
c5d01958 355 declared inline and nested functions. These were optimized out
b633db7b 356 in the original implementation and it is unclear whether we want
6fc0bb99 357 to change the behavior here. */
5d342ef9 358 if (((TREE_PUBLIC (decl)
c5d01958
EB
359 || (!optimize
360 && !node->local.disregard_inline_limits
b633db7b 361 && !DECL_DECLARED_INLINE_P (decl)
c5d01958
EB
362 && !(DECL_CONTEXT (decl)
363 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
b20996ff
JH
364 && !flag_whole_program
365 && !flag_lto
366 && !flag_whopr)
ce91e74c 367 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
8dafba3c
RH
368 return true;
369
370 /* Constructors and destructors are reachable from the runtime by
371 some mechanism. */
372 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
373 return true;
374
8dafba3c
RH
375 return false;
376}
377
d60ab196 378/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
f45e0ad1
JH
379 functions into callgraph in a way so they look like ordinary reachable
380 functions inserted into callgraph already at construction time. */
381
382bool
383cgraph_process_new_functions (void)
384{
385 bool output = false;
386 tree fndecl;
387 struct cgraph_node *node;
388
2942c502 389 varpool_analyze_pending_decls ();
f45e0ad1
JH
390 /* Note that this queue may grow as its being processed, as the new
391 functions may generate new ones. */
392 while (cgraph_new_nodes)
393 {
394 node = cgraph_new_nodes;
395 fndecl = node->decl;
396 cgraph_new_nodes = cgraph_new_nodes->next_needed;
397 switch (cgraph_state)
398 {
399 case CGRAPH_STATE_CONSTRUCTION:
400 /* At construction time we just need to finalize function and move
401 it into reachable functions list. */
402
403 node->next_needed = NULL;
404 cgraph_finalize_function (fndecl, false);
405 cgraph_mark_reachable_node (node);
406 output = true;
407 break;
408
409 case CGRAPH_STATE_IPA:
7a388ee4 410 case CGRAPH_STATE_IPA_SSA:
f45e0ad1
JH
411 /* When IPA optimization already started, do all essential
412 transformations that has been already performed on the whole
413 cgraph but not on this function. */
414
726a989a 415 gimple_register_cfg_hooks ();
f45e0ad1
JH
416 if (!node->analyzed)
417 cgraph_analyze_function (node);
418 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
419 current_function_decl = fndecl;
1920df6c 420 compute_inline_parameters (node);
7a388ee4
JH
421 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
422 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
423 /* When not optimizing, be sure we run early local passes anyway
424 to expand OMP. */
425 || !optimize)
8ddbbcae 426 execute_pass_list (pass_early_local_passes.pass.sub);
f45e0ad1
JH
427 free_dominance_info (CDI_POST_DOMINATORS);
428 free_dominance_info (CDI_DOMINATORS);
429 pop_cfun ();
430 current_function_decl = NULL;
431 break;
432
433 case CGRAPH_STATE_EXPANSION:
434 /* Functions created during expansion shall be compiled
435 directly. */
257eb6e3 436 node->process = 0;
f45e0ad1
JH
437 cgraph_expand_function (node);
438 break;
439
440 default:
441 gcc_unreachable ();
442 break;
443 }
129a37fc 444 cgraph_call_function_insertion_hooks (node);
2942c502 445 varpool_analyze_pending_decls ();
f45e0ad1
JH
446 }
447 return output;
448}
449
d71cc23f
JH
450/* As an GCC extension we allow redefinition of the function. The
451 semantics when both copies of bodies differ is not well defined.
452 We replace the old body with new body so in unit at a time mode
453 we always use new body, while in normal mode we may end up with
454 old body inlined into some functions and new body expanded and
455 inlined in others.
456
457 ??? It may make more sense to use one body for inlining and other
458 body for expanding the function but this is difficult to do. */
459
460static void
461cgraph_reset_node (struct cgraph_node *node)
462{
257eb6e3 463 /* If node->process is set, then we have already begun whole-unit analysis.
7e8b322a
JH
464 This is *not* testing for whether we've already emitted the function.
465 That case can be sort-of legitimately seen with real function redefinition
466 errors. I would argue that the front end should never present us with
467 such a case, but don't enforce that for now. */
257eb6e3 468 gcc_assert (!node->process);
d71cc23f
JH
469
470 /* Reset our data structures so we can analyze the function again. */
471 memset (&node->local, 0, sizeof (node->local));
472 memset (&node->global, 0, sizeof (node->global));
473 memset (&node->rtl, 0, sizeof (node->rtl));
474 node->analyzed = false;
475 node->local.redefined_extern_inline = true;
476 node->local.finalized = false;
477
d71cc23f
JH
478 cgraph_node_remove_callees (node);
479
480 /* We may need to re-queue the node for assembling in case
b86b3ea3
RG
481 we already proceeded it and ignored as not needed or got
482 a re-declaration in IMA mode. */
483 if (node->reachable)
d71cc23f
JH
484 {
485 struct cgraph_node *n;
486
487 for (n = cgraph_nodes_queue; n; n = n->next_needed)
488 if (n == node)
489 break;
490 if (!n)
491 node->reachable = 0;
492 }
493}
d853a20e 494
953ff289
DN
495static void
496cgraph_lower_function (struct cgraph_node *node)
497{
498 if (node->lowered)
499 return;
a406865a
RG
500
501 if (node->nested)
502 lower_nested_functions (node->decl);
503 gcc_assert (!node->nested);
504
953ff289
DN
505 tree_lowering_passes (node->decl);
506 node->lowered = true;
507}
508
6b00c969
RH
509/* DECL has been parsed. Take it, queue it, compile it at the whim of the
510 logic in effect. If NESTED is true, then our caller cannot stand to have
511 the garbage collector run at the moment. We would need to either create
512 a new GC context, or just not compile right now. */
1c4a429a
JH
513
514void
6b00c969 515cgraph_finalize_function (tree decl, bool nested)
1c4a429a
JH
516{
517 struct cgraph_node *node = cgraph_node (decl);
518
d853a20e 519 if (node->local.finalized)
d71cc23f 520 cgraph_reset_node (node);
6b00c969 521
6bad2617 522 node->pid = cgraph_max_pid ++;
d853a20e 523 notice_global_symbol (decl);
f6981e16 524 node->local.finalized = true;
e21aff8a 525 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
d88e5c37 526 node->finalized_by_frontend = true;
7be82279 527 record_cdtor_fn (node->decl);
1c4a429a 528
d7f09764 529 if (cgraph_decide_is_function_needed (node, decl))
8dafba3c
RH
530 cgraph_mark_needed_node (node);
531
ff5c4582 532 /* Since we reclaim unreachable nodes at the end of every language
e7d6beb0
JH
533 level unit, we need to be conservative about possible entry points
534 there. */
ce91e74c 535 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
e7d6beb0
JH
536 cgraph_mark_reachable_node (node);
537
8dafba3c 538 /* If we've not yet emitted decl, tell the debug info about it. */
6b00c969 539 if (!TREE_ASM_WRITTEN (decl))
8dafba3c 540 (*debug_hooks->deferred_inline_function) (decl);
d173e685 541
902edd36
JH
542 /* Possibly warn about unused parameters. */
543 if (warn_unused_parameter)
544 do_warn_unused_parameter (decl);
7e8b322a
JH
545
546 if (!nested)
547 ggc_collect ();
1c4a429a
JH
548}
549
f0c882ab
JH
550/* C99 extern inline keywords allow changing of declaration after function
551 has been finalized. We need to re-decide if we want to mark the function as
552 needed then. */
553
554void
555cgraph_mark_if_needed (tree decl)
556{
557 struct cgraph_node *node = cgraph_node (decl);
d7f09764 558 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
f0c882ab
JH
559 cgraph_mark_needed_node (node);
560}
561
753d358d 562#ifdef ENABLE_CHECKING
9187e02d
JH
563/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
564static bool
565clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
566{
567 while (node != node2 && node2)
568 node2 = node2->clone_of;
569 return node2 != NULL;
570}
753d358d 571#endif
9187e02d 572
02ec6988
MJ
573/* Verify edge E count and frequency. */
574
575static bool
576verify_edge_count_and_frequency (struct cgraph_edge *e)
577{
578 bool error_found = false;
579 if (e->count < 0)
580 {
581 error ("caller edge count is negative");
582 error_found = true;
583 }
584 if (e->frequency < 0)
585 {
586 error ("caller edge frequency is negative");
587 error_found = true;
588 }
589 if (e->frequency > CGRAPH_FREQ_MAX)
590 {
591 error ("caller edge frequency is too large");
592 error_found = true;
593 }
594 if (gimple_has_body_p (e->caller->decl)
595 && !e->caller->global.inlined_to
596 && (e->frequency
597 != compute_call_stmt_bb_frequency (e->caller->decl,
598 gimple_bb (e->call_stmt))))
599 {
600 error ("caller edge frequency %i does not match BB freqency %i",
601 e->frequency,
602 compute_call_stmt_bb_frequency (e->caller->decl,
603 gimple_bb (e->call_stmt)));
604 error_found = true;
605 }
606 return error_found;
607}
608
18c6ada9 609/* Verify cgraph nodes of given cgraph node. */
24e47c76 610DEBUG_FUNCTION void
18c6ada9
JH
611verify_cgraph_node (struct cgraph_node *node)
612{
613 struct cgraph_edge *e;
e21aff8a 614 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
2bafad93 615 struct function *saved_cfun = cfun;
e21aff8a 616 basic_block this_block;
726a989a 617 gimple_stmt_iterator gsi;
e0704a46 618 bool error_found = false;
18c6ada9 619
1da2ed5f 620 if (seen_error ())
5771bd91
RG
621 return;
622
18c6ada9 623 timevar_push (TV_CGRAPH_VERIFY);
2bafad93
JJ
624 /* debug_generic_stmt needs correct cfun */
625 set_cfun (this_cfun);
18c6ada9
JH
626 for (e = node->callees; e; e = e->next_callee)
627 if (e->aux)
628 {
ab532386 629 error ("aux field set for edge %s->%s",
4f1e4960
JM
630 identifier_to_locale (cgraph_node_name (e->caller)),
631 identifier_to_locale (cgraph_node_name (e->callee)));
18c6ada9
JH
632 error_found = true;
633 }
06191a23
JH
634 if (node->count < 0)
635 {
636 error ("Execution count is negative");
637 error_found = true;
638 }
b20996ff
JH
639 if (node->global.inlined_to && node->local.externally_visible)
640 {
641 error ("Externally visible inline clone");
642 error_found = true;
643 }
644 if (node->global.inlined_to && node->address_taken)
645 {
646 error ("Inline clone with address taken");
647 error_found = true;
648 }
649 if (node->global.inlined_to && node->needed)
650 {
651 error ("Inline clone is needed");
652 error_found = true;
653 }
e33c6cd6
MJ
654 for (e = node->indirect_calls; e; e = e->next_callee)
655 {
656 if (e->aux)
657 {
658 error ("aux field set for indirect edge from %s",
659 identifier_to_locale (cgraph_node_name (e->caller)));
660 error_found = true;
661 }
662 if (!e->indirect_unknown_callee
663 || !e->indirect_info)
664 {
665 error ("An indirect edge from %s is not marked as indirect or has "
666 "associated indirect_info, the corresponding statement is: ",
667 identifier_to_locale (cgraph_node_name (e->caller)));
668 debug_gimple_stmt (e->call_stmt);
669 error_found = true;
670 }
671 }
18c6ada9
JH
672 for (e = node->callers; e; e = e->next_caller)
673 {
02ec6988
MJ
674 if (verify_edge_count_and_frequency (e))
675 error_found = true;
18c6ada9
JH
676 if (!e->inline_failed)
677 {
678 if (node->global.inlined_to
679 != (e->caller->global.inlined_to
680 ? e->caller->global.inlined_to : e->caller))
681 {
ab532386 682 error ("inlined_to pointer is wrong");
18c6ada9
JH
683 error_found = true;
684 }
685 if (node->callers->next_caller)
686 {
ab532386 687 error ("multiple inline callers");
18c6ada9
JH
688 error_found = true;
689 }
690 }
691 else
692 if (node->global.inlined_to)
693 {
ab532386 694 error ("inlined_to pointer set for noninline callers");
18c6ada9
JH
695 error_found = true;
696 }
697 }
02ec6988
MJ
698 for (e = node->indirect_calls; e; e = e->next_callee)
699 if (verify_edge_count_and_frequency (e))
700 error_found = true;
18c6ada9
JH
701 if (!node->callers && node->global.inlined_to)
702 {
95a52ebb 703 error ("inlined_to pointer is set but no predecessors found");
18c6ada9
JH
704 error_found = true;
705 }
706 if (node->global.inlined_to == node)
707 {
ab532386 708 error ("inlined_to pointer refers to itself");
18c6ada9
JH
709 error_found = true;
710 }
711
69fb1284 712 if (!cgraph_node (node->decl))
18c6ada9 713 {
69fb1284 714 error ("node not found in cgraph_hash");
18c6ada9
JH
715 error_found = true;
716 }
c22cacf3 717
9187e02d
JH
718 if (node->clone_of)
719 {
720 struct cgraph_node *n;
721 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
722 if (n == node)
723 break;
724 if (!n)
725 {
726 error ("node has wrong clone_of");
727 error_found = true;
728 }
729 }
730 if (node->clones)
731 {
732 struct cgraph_node *n;
733 for (n = node->clones; n; n = n->next_sibling_clone)
734 if (n->clone_of != node)
735 break;
736 if (n)
737 {
738 error ("node has wrong clone list");
739 error_found = true;
740 }
741 }
742 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
743 {
744 error ("node is in clone list but it is not clone");
745 error_found = true;
746 }
747 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
748 {
749 error ("node has wrong prev_clone pointer");
750 error_found = true;
751 }
752 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
753 {
754 error ("double linked list of clones corrupted");
755 error_found = true;
756 }
78eaf7bf
MJ
757 if (node->same_comdat_group)
758 {
759 struct cgraph_node *n = node->same_comdat_group;
760
761 if (!DECL_ONE_ONLY (node->decl))
762 {
763 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
764 error_found = true;
765 }
766 if (n == node)
767 {
768 error ("node is alone in a comdat group");
769 error_found = true;
770 }
771 do
772 {
773 if (!n->same_comdat_group)
774 {
775 error ("same_comdat_group is not a circular list");
776 error_found = true;
777 break;
778 }
779 n = n->same_comdat_group;
780 }
781 while (n != node);
782 }
9187e02d
JH
783
784 if (node->analyzed && gimple_has_body_p (node->decl)
726a989a 785 && !TREE_ASM_WRITTEN (node->decl)
d7f09764
DN
786 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
787 && !flag_wpa)
18c6ada9 788 {
e21aff8a
SB
789 if (this_cfun->cfg)
790 {
791 /* The nodes we're interested in are never shared, so walk
792 the tree ignoring duplicates. */
2dee695b 793 struct pointer_set_t *visited_nodes = pointer_set_create ();
e21aff8a
SB
794 /* Reach the trees by walking over the CFG, and note the
795 enclosing basic-blocks in the call edges. */
796 FOR_EACH_BB_FN (this_block, this_cfun)
726a989a
RB
797 for (gsi = gsi_start_bb (this_block);
798 !gsi_end_p (gsi);
799 gsi_next (&gsi))
e0704a46 800 {
726a989a 801 gimple stmt = gsi_stmt (gsi);
e33c6cd6 802 if (is_gimple_call (stmt))
e0704a46
JH
803 {
804 struct cgraph_edge *e = cgraph_edge (node, stmt);
e33c6cd6 805 tree decl = gimple_call_fndecl (stmt);
e0704a46
JH
806 if (e)
807 {
808 if (e->aux)
809 {
ab532386 810 error ("shared call_stmt:");
726a989a 811 debug_gimple_stmt (stmt);
e0704a46
JH
812 error_found = true;
813 }
e33c6cd6 814 if (!e->indirect_unknown_callee)
6744a6ab 815 {
e33c6cd6
MJ
816 if (e->callee->same_body_alias)
817 {
818 error ("edge points to same body alias:");
819 debug_tree (e->callee->decl);
820 error_found = true;
821 }
e466e2ce
JH
822#ifdef ENABLE_CHECKING
823 else if (!e->callee->global.inlined_to
e33c6cd6 824 && decl
e466e2ce
JH
825 && cgraph_get_node (decl)
826 && (e->callee->former_clone_of
827 != cgraph_get_node (decl)->decl)
e33c6cd6
MJ
828 && !clone_of_p (cgraph_node (decl),
829 e->callee))
830 {
831 error ("edge points to wrong declaration:");
832 debug_tree (e->callee->decl);
833 fprintf (stderr," Instead of:");
834 debug_tree (decl);
835 error_found = true;
836 }
e466e2ce 837#endif
6744a6ab 838 }
e33c6cd6 839 else if (decl)
e0704a46 840 {
e33c6cd6
MJ
841 error ("an indirect edge with unknown callee "
842 "corresponding to a call_stmt with "
843 "a known declaration:");
47cb0d7d 844 error_found = true;
e33c6cd6 845 debug_gimple_stmt (e->call_stmt);
e0704a46
JH
846 }
847 e->aux = (void *)1;
848 }
e33c6cd6 849 else if (decl)
e0704a46 850 {
ab532386 851 error ("missing callgraph edge for call stmt:");
726a989a 852 debug_gimple_stmt (stmt);
e0704a46
JH
853 error_found = true;
854 }
855 }
856 }
e21aff8a 857 pointer_set_destroy (visited_nodes);
e21aff8a
SB
858 }
859 else
860 /* No CFG available?! */
861 gcc_unreachable ();
862
18c6ada9
JH
863 for (e = node->callees; e; e = e->next_callee)
864 {
e33c6cd6 865 if (!e->aux)
18c6ada9 866 {
ab532386 867 error ("edge %s->%s has no corresponding call_stmt",
4f1e4960
JM
868 identifier_to_locale (cgraph_node_name (e->caller)),
869 identifier_to_locale (cgraph_node_name (e->callee)));
726a989a 870 debug_gimple_stmt (e->call_stmt);
18c6ada9
JH
871 error_found = true;
872 }
873 e->aux = 0;
874 }
e33c6cd6
MJ
875 for (e = node->indirect_calls; e; e = e->next_callee)
876 {
877 if (!e->aux)
878 {
879 error ("an indirect edge from %s has no corresponding call_stmt",
880 identifier_to_locale (cgraph_node_name (e->caller)));
881 debug_gimple_stmt (e->call_stmt);
882 error_found = true;
883 }
884 e->aux = 0;
885 }
18c6ada9
JH
886 }
887 if (error_found)
888 {
889 dump_cgraph_node (stderr, node);
ab532386 890 internal_error ("verify_cgraph_node failed");
18c6ada9 891 }
2bafad93 892 set_cfun (saved_cfun);
18c6ada9
JH
893 timevar_pop (TV_CGRAPH_VERIFY);
894}
895
896/* Verify whole cgraph structure. */
24e47c76 897DEBUG_FUNCTION void
18c6ada9
JH
898verify_cgraph (void)
899{
900 struct cgraph_node *node;
901
1da2ed5f 902 if (seen_error ())
89480522
JH
903 return;
904
18c6ada9
JH
905 for (node = cgraph_nodes; node; node = node->next)
906 verify_cgraph_node (node);
907}
908
474eccc6
ILT
909/* Output all asm statements we have stored up to be output. */
910
911static void
912cgraph_output_pending_asms (void)
913{
914 struct cgraph_asm_node *can;
915
1da2ed5f 916 if (seen_error ())
474eccc6
ILT
917 return;
918
919 for (can = cgraph_asm_nodes; can; can = can->next)
920 assemble_asm (can->asm_str);
921 cgraph_asm_nodes = NULL;
922}
923
e767b5be 924/* Analyze the function scheduled to be output. */
a406865a 925static void
e767b5be
JH
926cgraph_analyze_function (struct cgraph_node *node)
927{
a406865a 928 tree save = current_function_decl;
e767b5be
JH
929 tree decl = node->decl;
930
25c84396 931 current_function_decl = decl;
e21aff8a 932 push_cfun (DECL_STRUCT_FUNCTION (decl));
a406865a 933
0e0a1359
MJ
934 assign_assembler_name_if_neeeded (node->decl);
935
a406865a
RG
936 /* Make sure to gimplify bodies only once. During analyzing a
937 function we lower it, which will require gimplified nested
938 functions, so we can end up here with an already gimplified
939 body. */
940 if (!gimple_body (decl))
941 gimplify_function_tree (decl);
942 dump_function (TDI_generic, decl);
943
e21aff8a 944 cgraph_lower_function (node);
6a84c098 945 node->analyzed = true;
e767b5be 946
e21aff8a 947 pop_cfun ();
a406865a 948 current_function_decl = save;
e767b5be
JH
949}
950
386b46cf
JH
951/* Look for externally_visible and used attributes and mark cgraph nodes
952 accordingly.
953
954 We cannot mark the nodes at the point the attributes are processed (in
955 handle_*_attribute) because the copy of the declarations available at that
956 point may not be canonical. For example, in:
957
958 void f();
959 void f() __attribute__((used));
960
961 the declaration we see in handle_used_attribute will be the second
962 declaration -- but the front end will subsequently merge that declaration
963 with the original declaration and discard the second declaration.
964
965 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
966
967 void f() {}
968 void f() __attribute__((externally_visible));
969
970 is valid.
971
972 So, we walk the nodes at the end of the translation unit, applying the
973 attributes at that point. */
974
975static void
976process_function_and_variable_attributes (struct cgraph_node *first,
8a4a83ed 977 struct varpool_node *first_var)
386b46cf
JH
978{
979 struct cgraph_node *node;
8a4a83ed 980 struct varpool_node *vnode;
386b46cf
JH
981
982 for (node = cgraph_nodes; node != first; node = node->next)
983 {
984 tree decl = node->decl;
b42186f1 985 if (DECL_PRESERVE_P (decl))
152464d2 986 cgraph_mark_needed_node (node);
386b46cf
JH
987 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
988 {
343d4b27 989 if (! TREE_PUBLIC (node->decl))
c5d75364
MLI
990 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
991 "%<externally_visible%>"
992 " attribute have effect only on public objects");
b20996ff
JH
993 else if (node->local.finalized)
994 cgraph_mark_needed_node (node);
386b46cf
JH
995 }
996 }
8a4a83ed 997 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
386b46cf
JH
998 {
999 tree decl = vnode->decl;
b42186f1 1000 if (DECL_PRESERVE_P (decl))
386b46cf 1001 {
a8289259 1002 vnode->force_output = true;
386b46cf 1003 if (vnode->finalized)
8a4a83ed 1004 varpool_mark_needed_node (vnode);
386b46cf
JH
1005 }
1006 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1007 {
343d4b27 1008 if (! TREE_PUBLIC (vnode->decl))
c5d75364
MLI
1009 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
1010 "%<externally_visible%>"
1011 " attribute have effect only on public objects");
b20996ff
JH
1012 else if (vnode->finalized)
1013 varpool_mark_needed_node (vnode);
386b46cf
JH
1014 }
1015 }
1016}
1017
151e6f24
JH
1018/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1019 each reachable functions) and build cgraph.
1020 The function can be called multiple times after inserting new nodes
88512ba0 1021 into beginning of queue. Just the new part of queue is re-scanned then. */
1c4a429a 1022
151e6f24
JH
1023static void
1024cgraph_analyze_functions (void)
1c4a429a 1025{
cd9c7bd2 1026 /* Keep track of already processed nodes when called multiple times for
aabcd309 1027 intermodule optimization. */
cd9c7bd2 1028 static struct cgraph_node *first_analyzed;
61e00a5e 1029 struct cgraph_node *first_processed = first_analyzed;
8a4a83ed 1030 static struct varpool_node *first_analyzed_var;
151e6f24 1031 struct cgraph_node *node, *next;
1c4a429a 1032
61e00a5e
JH
1033 process_function_and_variable_attributes (first_processed,
1034 first_analyzed_var);
1035 first_processed = cgraph_nodes;
8a4a83ed
JH
1036 first_analyzed_var = varpool_nodes;
1037 varpool_analyze_pending_decls ();
a194aa56 1038 if (cgraph_dump_file)
1c4a429a 1039 {
7d82fe7c 1040 fprintf (cgraph_dump_file, "Initial entry points:");
cd9c7bd2 1041 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
39ecc018 1042 if (node->needed)
a194aa56
JH
1043 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1044 fprintf (cgraph_dump_file, "\n");
1c4a429a 1045 }
151e6f24 1046 cgraph_process_new_functions ();
1c4a429a 1047
7660e67e
SB
1048 /* Propagate reachability flag and lower representation of all reachable
1049 functions. In the future, lowering will introduce new functions and
1050 new entry points on the way (by template instantiation and virtual
1051 method table generation for instance). */
1668aabc 1052 while (cgraph_nodes_queue)
1c4a429a 1053 {
e767b5be 1054 struct cgraph_edge *edge;
1668aabc
JH
1055 tree decl = cgraph_nodes_queue->decl;
1056
1057 node = cgraph_nodes_queue;
8bd87c4e 1058 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
18c6ada9 1059 node->next_needed = NULL;
1c4a429a 1060
cd4dea62 1061 /* ??? It is possible to create extern inline function and later using
9d203871 1062 weak alias attribute to kill its body. See
cd4dea62 1063 gcc.c-torture/compile/20011119-1.c */
726a989a 1064 if (!DECL_STRUCT_FUNCTION (decl))
d71cc23f
JH
1065 {
1066 cgraph_reset_node (node);
1067 continue;
1068 }
cd4dea62 1069
d7f09764
DN
1070 if (!node->analyzed)
1071 cgraph_analyze_function (node);
8dafba3c 1072
1c4a429a 1073 for (edge = node->callees; edge; edge = edge->next_callee)
e767b5be 1074 if (!edge->callee->reachable)
8dafba3c
RH
1075 cgraph_mark_reachable_node (edge->callee);
1076
b66887e4
JJ
1077 if (node->same_comdat_group)
1078 {
1079 for (next = node->same_comdat_group;
1080 next != node;
1081 next = next->same_comdat_group)
1082 cgraph_mark_reachable_node (next);
1083 }
1084
6b20f353
DS
1085 /* If decl is a clone of an abstract function, mark that abstract
1086 function so that we don't release its body. The DECL_INITIAL() of that
1087 abstract function declaration will be later needed to output debug info. */
1088 if (DECL_ABSTRACT_ORIGIN (decl))
1089 {
1090 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1091 origin_node->abstract_and_needed = true;
1092 }
1093
61e00a5e
JH
1094 /* We finalize local static variables during constructing callgraph
1095 edges. Process their attributes too. */
1096 process_function_and_variable_attributes (first_processed,
1097 first_analyzed_var);
1098 first_processed = cgraph_nodes;
8a4a83ed
JH
1099 first_analyzed_var = varpool_nodes;
1100 varpool_analyze_pending_decls ();
151e6f24 1101 cgraph_process_new_functions ();
1c4a429a 1102 }
8dafba3c 1103
564738df 1104 /* Collect entry points to the unit. */
a194aa56 1105 if (cgraph_dump_file)
1668aabc 1106 {
7d82fe7c 1107 fprintf (cgraph_dump_file, "Unit entry points:");
cd9c7bd2 1108 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
39ecc018 1109 if (node->needed)
a194aa56 1110 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
7d82fe7c 1111 fprintf (cgraph_dump_file, "\n\nInitial ");
e767b5be 1112 dump_cgraph (cgraph_dump_file);
1668aabc 1113 }
7660e67e 1114
a194aa56
JH
1115 if (cgraph_dump_file)
1116 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1c4a429a 1117
96fc428c 1118 for (node = cgraph_nodes; node != first_analyzed; node = next)
1c4a429a
JH
1119 {
1120 tree decl = node->decl;
96fc428c 1121 next = node->next;
1c4a429a 1122
39ecc018 1123 if (node->local.finalized && !gimple_has_body_p (decl))
c22cacf3 1124 cgraph_reset_node (node);
d71cc23f 1125
39ecc018 1126 if (!node->reachable && gimple_has_body_p (decl))
1c4a429a 1127 {
a194aa56
JH
1128 if (cgraph_dump_file)
1129 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
18c6ada9 1130 cgraph_remove_node (node);
d71cc23f 1131 continue;
1c4a429a 1132 }
9b0436b7
JH
1133 else
1134 node->next_needed = NULL;
39ecc018 1135 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
d71cc23f 1136 gcc_assert (node->analyzed == node->local.finalized);
1c4a429a 1137 }
a194aa56 1138 if (cgraph_dump_file)
7d82fe7c
KC
1139 {
1140 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1141 dump_cgraph (cgraph_dump_file);
1142 }
cd9c7bd2 1143 first_analyzed = cgraph_nodes;
1c4a429a 1144 ggc_collect ();
151e6f24
JH
1145}
1146
5f1a9ebb 1147
151e6f24
JH
1148/* Analyze the whole compilation unit once it is parsed completely. */
1149
1150void
1151cgraph_finalize_compilation_unit (void)
1152{
90097c67
RG
1153 timevar_push (TV_CGRAPH);
1154
a406865a
RG
1155 /* Do not skip analyzing the functions if there were errors, we
1156 miss diagnostics for following functions otherwise. */
151e6f24 1157
5f1a9ebb 1158 /* Emit size functions we didn't inline. */
f82a627c 1159 finalize_size_functions ();
5f1a9ebb 1160
5f1a9ebb
RG
1161 /* Call functions declared with the "constructor" or "destructor"
1162 attribute. */
1163 cgraph_build_cdtor_fns ();
151e6f24 1164
90097c67
RG
1165 /* Mark alias targets necessary and emit diagnostics. */
1166 finish_aliases_1 ();
1167
151e6f24
JH
1168 if (!quiet_flag)
1169 {
1170 fprintf (stderr, "\nAnalyzing compilation unit\n");
1171 fflush (stderr);
1172 }
1173
90097c67
RG
1174 /* Gimplify and lower all functions, compute reachability and
1175 remove unreachable nodes. */
1176 cgraph_analyze_functions ();
1177
5f1a9ebb
RG
1178 /* Mark alias targets necessary and emit diagnostics. */
1179 finish_aliases_1 ();
1180
90097c67 1181 /* Gimplify and lower thunks. */
151e6f24 1182 cgraph_analyze_functions ();
a406865a 1183
90097c67 1184 /* Finally drive the pass manager. */
a406865a 1185 cgraph_optimize ();
90097c67
RG
1186
1187 timevar_pop (TV_CGRAPH);
1c4a429a 1188}
3baf459d
DN
1189
1190
1c4a429a
JH
1191/* Figure out what functions we want to assemble. */
1192
1193static void
db0e878d 1194cgraph_mark_functions_to_output (void)
1c4a429a
JH
1195{
1196 struct cgraph_node *node;
b66887e4
JJ
1197#ifdef ENABLE_CHECKING
1198 bool check_same_comdat_groups = false;
1199
1200 for (node = cgraph_nodes; node; node = node->next)
1201 gcc_assert (!node->process);
1202#endif
1c4a429a 1203
1c4a429a
JH
1204 for (node = cgraph_nodes; node; node = node->next)
1205 {
1206 tree decl = node->decl;
b58b1157 1207 struct cgraph_edge *e;
c22cacf3 1208
b66887e4
JJ
1209 gcc_assert (!node->process || node->same_comdat_group);
1210 if (node->process)
1211 continue;
b58b1157
JH
1212
1213 for (e = node->callers; e; e = e->next_caller)
dc0bfe6a 1214 if (e->inline_failed)
b58b1157 1215 break;
1c4a429a 1216
7660e67e
SB
1217 /* We need to output all local functions that are used and not
1218 always inlined, as well as those that are reachable from
1219 outside the current compilation unit. */
39ecc018 1220 if (node->analyzed
18c6ada9 1221 && !node->global.inlined_to
a837268b 1222 && (node->needed || node->reachable_from_other_partition
bd3cdcc0 1223 || node->address_taken
b58b1157 1224 || (e && node->reachable))
6de9cd9a 1225 && !TREE_ASM_WRITTEN (decl)
1c4a429a 1226 && !DECL_EXTERNAL (decl))
b66887e4
JJ
1227 {
1228 node->process = 1;
1229 if (node->same_comdat_group)
1230 {
1231 struct cgraph_node *next;
1232 for (next = node->same_comdat_group;
1233 next != node;
1234 next = next->same_comdat_group)
1235 next->process = 1;
1236 }
1237 }
1238 else if (node->same_comdat_group)
1239 {
1240#ifdef ENABLE_CHECKING
1241 check_same_comdat_groups = true;
1242#endif
1243 }
341c100f 1244 else
1a2caa7a
NS
1245 {
1246 /* We should've reclaimed all functions that are not needed. */
1247#ifdef ENABLE_CHECKING
726a989a 1248 if (!node->global.inlined_to
39ecc018 1249 && gimple_has_body_p (decl)
a837268b
JH
1250 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1251 are inside partition, we can end up not removing the body since we no longer
1252 have analyzed node pointing to it. */
1253 && !node->in_other_partition
1a2caa7a
NS
1254 && !DECL_EXTERNAL (decl))
1255 {
1256 dump_cgraph_node (stderr, node);
1257 internal_error ("failed to reclaim unneeded function");
1258 }
1259#endif
726a989a 1260 gcc_assert (node->global.inlined_to
39ecc018 1261 || !gimple_has_body_p (decl)
a837268b 1262 || node->in_other_partition
1a2caa7a
NS
1263 || DECL_EXTERNAL (decl));
1264
1265 }
c22cacf3 1266
18d13f34 1267 }
b66887e4
JJ
1268#ifdef ENABLE_CHECKING
1269 if (check_same_comdat_groups)
1270 for (node = cgraph_nodes; node; node = node->next)
1271 if (node->same_comdat_group && !node->process)
1272 {
1273 tree decl = node->decl;
1274 if (!node->global.inlined_to
1275 && gimple_has_body_p (decl)
a837268b
JH
1276 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1277 are inside partition, we can end up not removing the body since we no longer
1278 have analyzed node pointing to it. */
1279 && !node->in_other_partition
b66887e4
JJ
1280 && !DECL_EXTERNAL (decl))
1281 {
1282 dump_cgraph_node (stderr, node);
1283 internal_error ("failed to reclaim unneeded function");
1284 }
1285 }
1286#endif
18d13f34
JH
1287}
1288
6744a6ab
JH
1289/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1290 in lowered gimple form.
1291
1292 Set current_function_decl and cfun to newly constructed empty function body.
1293 return basic block in the function body. */
1294
1295static basic_block
1296init_lowered_empty_function (tree decl)
1297{
1298 basic_block bb;
1299
1300 current_function_decl = decl;
1301 allocate_struct_function (decl, false);
1302 gimple_register_cfg_hooks ();
1303 init_empty_tree_cfg ();
1304 init_tree_ssa (cfun);
1305 init_ssa_operands ();
1306 cfun->gimple_df->in_ssa_p = true;
1307 DECL_INITIAL (decl) = make_node (BLOCK);
1308
1309 DECL_SAVED_TREE (decl) = error_mark_node;
1310 cfun->curr_properties |=
1311 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1312 PROP_ssa);
1313
1314 /* Create BB for body of the function and connect it properly. */
1315 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1316 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1317 make_edge (bb, EXIT_BLOCK_PTR, 0);
1318
1319 return bb;
1320}
1321
1322/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1323 offset indicated by VIRTUAL_OFFSET, if that is
1324 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1325 zero for a result adjusting thunk. */
1326
1327static tree
1328thunk_adjust (gimple_stmt_iterator * bsi,
1329 tree ptr, bool this_adjusting,
1330 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1331{
1332 gimple stmt;
1333 tree ret;
1334
313333a6
RG
1335 if (this_adjusting
1336 && fixed_offset != 0)
6744a6ab
JH
1337 {
1338 stmt = gimple_build_assign (ptr,
1339 fold_build2_loc (input_location,
1340 POINTER_PLUS_EXPR,
1341 TREE_TYPE (ptr), ptr,
1342 size_int (fixed_offset)));
1343 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1344 }
1345
1346 /* If there's a virtual offset, look up that value in the vtable and
1347 adjust the pointer again. */
1348 if (virtual_offset)
1349 {
1350 tree vtabletmp;
1351 tree vtabletmp2;
1352 tree vtabletmp3;
1353 tree offsettmp;
1354
1355 if (!vtable_entry_type)
1356 {
1357 tree vfunc_type = make_node (FUNCTION_TYPE);
1358 TREE_TYPE (vfunc_type) = integer_type_node;
1359 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1360 layout_type (vfunc_type);
1361
1362 vtable_entry_type = build_pointer_type (vfunc_type);
1363 }
1364
1365 vtabletmp =
1366 create_tmp_var (build_pointer_type
1367 (build_pointer_type (vtable_entry_type)), "vptr");
1368
1369 /* The vptr is always at offset zero in the object. */
1370 stmt = gimple_build_assign (vtabletmp,
1371 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1372 ptr));
1373 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1374 mark_symbols_for_renaming (stmt);
1375 find_referenced_vars_in (stmt);
1376
1377 /* Form the vtable address. */
1378 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1379 "vtableaddr");
1380 stmt = gimple_build_assign (vtabletmp2,
70f34814 1381 build_simple_mem_ref (vtabletmp));
6744a6ab
JH
1382 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1383 mark_symbols_for_renaming (stmt);
1384 find_referenced_vars_in (stmt);
1385
1386 /* Find the entry with the vcall offset. */
1387 stmt = gimple_build_assign (vtabletmp2,
1388 fold_build2_loc (input_location,
1389 POINTER_PLUS_EXPR,
1390 TREE_TYPE (vtabletmp2),
1391 vtabletmp2,
1392 fold_convert (sizetype,
1393 virtual_offset)));
1394 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1395
1396 /* Get the offset itself. */
1397 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1398 "vcalloffset");
1399 stmt = gimple_build_assign (vtabletmp3,
70f34814 1400 build_simple_mem_ref (vtabletmp2));
6744a6ab
JH
1401 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1402 mark_symbols_for_renaming (stmt);
1403 find_referenced_vars_in (stmt);
1404
1405 /* Cast to sizetype. */
1406 offsettmp = create_tmp_var (sizetype, "offset");
1407 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1408 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1409 mark_symbols_for_renaming (stmt);
1410 find_referenced_vars_in (stmt);
1411
1412 /* Adjust the `this' pointer. */
1413 ptr = fold_build2_loc (input_location,
1414 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1415 offsettmp);
1416 }
1417
313333a6
RG
1418 if (!this_adjusting
1419 && fixed_offset != 0)
6744a6ab
JH
1420 /* Adjust the pointer by the constant. */
1421 {
1422 tree ptrtmp;
1423
1424 if (TREE_CODE (ptr) == VAR_DECL)
1425 ptrtmp = ptr;
1426 else
1427 {
1428 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1429 stmt = gimple_build_assign (ptrtmp, ptr);
1430 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1431 mark_symbols_for_renaming (stmt);
1432 find_referenced_vars_in (stmt);
1433 }
1434 ptr = fold_build2_loc (input_location,
1435 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1436 size_int (fixed_offset));
1437 }
1438
1439 /* Emit the statement and gimplify the adjustment expression. */
1440 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1441 stmt = gimple_build_assign (ret, ptr);
1442 mark_symbols_for_renaming (stmt);
1443 find_referenced_vars_in (stmt);
1444 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1445
1446 return ret;
1447}
1448
1449/* Produce assembler for thunk NODE. */
1450
1451static void
1452assemble_thunk (struct cgraph_node *node)
1453{
1454 bool this_adjusting = node->thunk.this_adjusting;
1455 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1456 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1457 tree virtual_offset = NULL;
1458 tree alias = node->thunk.alias;
1459 tree thunk_fndecl = node->decl;
1460 tree a = DECL_ARGUMENTS (thunk_fndecl);
1461
1462 current_function_decl = thunk_fndecl;
1463
1464 if (this_adjusting
1465 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1466 virtual_value, alias))
1467 {
1468 const char *fnname;
1469 tree fn_block;
1470
1471 DECL_RESULT (thunk_fndecl)
1472 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1473 RESULT_DECL, 0, integer_type_node);
15488554 1474 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
6744a6ab
JH
1475
1476 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1477 create one. */
1478 fn_block = make_node (BLOCK);
1479 BLOCK_VARS (fn_block) = a;
1480 DECL_INITIAL (thunk_fndecl) = fn_block;
1481 init_function_start (thunk_fndecl);
1482 cfun->is_thunk = 1;
1483 assemble_start_function (thunk_fndecl, fnname);
1484
1485 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1486 fixed_offset, virtual_value, alias);
1487
1488 assemble_end_function (thunk_fndecl, fnname);
1489 init_insn_lengths ();
1490 free_after_compilation (cfun);
1491 set_cfun (NULL);
1492 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1493 }
1494 else
1495 {
1496 tree restype;
1497 basic_block bb, then_bb, else_bb, return_bb;
1498 gimple_stmt_iterator bsi;
1499 int nargs = 0;
1500 tree arg;
1501 int i;
1502 tree resdecl;
1503 tree restmp = NULL;
1504 VEC(tree, heap) *vargs;
1505
1506 gimple call;
1507 gimple ret;
1508
1509 DECL_IGNORED_P (thunk_fndecl) = 1;
1510 bitmap_obstack_initialize (NULL);
1511
1512 if (node->thunk.virtual_offset_p)
1513 virtual_offset = size_int (virtual_value);
1514
1515 /* Build the return declaration for the function. */
1516 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1517 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1518 {
1519 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1520 DECL_ARTIFICIAL (resdecl) = 1;
1521 DECL_IGNORED_P (resdecl) = 1;
1522 DECL_RESULT (thunk_fndecl) = resdecl;
1523 }
1524 else
1525 resdecl = DECL_RESULT (thunk_fndecl);
1526
1527 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1528
1529 bsi = gsi_start_bb (bb);
1530
1531 /* Build call to the function being thunked. */
1532 if (!VOID_TYPE_P (restype))
1533 {
1534 if (!is_gimple_reg_type (restype))
1535 {
1536 restmp = resdecl;
c021f10b 1537 add_local_decl (cfun, restmp);
6744a6ab
JH
1538 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1539 }
1540 else
1541 restmp = create_tmp_var_raw (restype, "retval");
1542 }
1543
910ad8de 1544 for (arg = a; arg; arg = DECL_CHAIN (arg))
6744a6ab
JH
1545 nargs++;
1546 vargs = VEC_alloc (tree, heap, nargs);
1547 if (this_adjusting)
1548 VEC_quick_push (tree, vargs,
1549 thunk_adjust (&bsi,
1550 a, 1, fixed_offset,
1551 virtual_offset));
1552 else
1553 VEC_quick_push (tree, vargs, a);
910ad8de 1554 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
6744a6ab
JH
1555 VEC_quick_push (tree, vargs, arg);
1556 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1557 VEC_free (tree, heap, vargs);
1558 gimple_call_set_cannot_inline (call, true);
1559 gimple_call_set_from_thunk (call, true);
1560 if (restmp)
1561 gimple_call_set_lhs (call, restmp);
1562 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1563 mark_symbols_for_renaming (call);
1564 find_referenced_vars_in (call);
1565 update_stmt (call);
1566
1567 if (restmp && !this_adjusting)
1568 {
1124098b 1569 tree true_label = NULL_TREE;
6744a6ab
JH
1570
1571 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1572 {
1573 gimple stmt;
1574 /* If the return type is a pointer, we need to
1575 protect against NULL. We know there will be an
1576 adjustment, because that's why we're emitting a
1577 thunk. */
1578 then_bb = create_basic_block (NULL, (void *) 0, bb);
1579 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1580 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1581 remove_edge (single_succ_edge (bb));
1582 true_label = gimple_block_label (then_bb);
6744a6ab
JH
1583 stmt = gimple_build_cond (NE_EXPR, restmp,
1584 fold_convert (TREE_TYPE (restmp),
1585 integer_zero_node),
1586 NULL_TREE, NULL_TREE);
1587 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1588 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1589 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1590 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1591 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1592 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1593 bsi = gsi_last_bb (then_bb);
1594 }
1595
1596 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1597 fixed_offset, virtual_offset);
1598 if (true_label)
1599 {
1600 gimple stmt;
1601 bsi = gsi_last_bb (else_bb);
1602 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1603 integer_zero_node));
1604 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1605 bsi = gsi_last_bb (return_bb);
1606 }
1607 }
1608 else
1609 gimple_call_set_tail (call, true);
1610
1611 /* Build return value. */
1612 ret = gimple_build_return (restmp);
1613 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1614
1615 delete_unreachable_blocks ();
1616 update_ssa (TODO_update_ssa);
1617
1618 cgraph_remove_same_body_alias (node);
1619 /* Since we want to emit the thunk, we explicitly mark its name as
1620 referenced. */
6744a6ab
JH
1621 cgraph_add_new_function (thunk_fndecl, true);
1622 bitmap_obstack_release (NULL);
1623 }
1624 current_function_decl = NULL;
1625}
1626
1c4a429a 1627/* Expand function specified by NODE. */
7660e67e 1628
1c4a429a 1629static void
db0e878d 1630cgraph_expand_function (struct cgraph_node *node)
1c4a429a
JH
1631{
1632 tree decl = node->decl;
1633
18c6ada9 1634 /* We ought to not compile any inline clones. */
341c100f 1635 gcc_assert (!node->global.inlined_to);
18c6ada9 1636
7e8b322a 1637 announce_function (decl);
257eb6e3 1638 node->process = 0;
18d13f34 1639
2dee695b 1640 gcc_assert (node->lowered);
776b966e 1641
a3546141 1642 /* Generate RTL for the body of DECL. */
e89d6010 1643 tree_rest_of_compilation (decl);
18d13f34 1644
6de9cd9a 1645 /* Make sure that BE didn't give up on compiling. */
f30cfcb1 1646 gcc_assert (TREE_ASM_WRITTEN (decl));
1c4a429a 1647 current_function_decl = NULL;
b2583345
JJ
1648 if (node->same_body)
1649 {
6744a6ab 1650 struct cgraph_node *alias, *next;
b2583345 1651 bool saved_alias = node->alias;
6744a6ab
JH
1652 for (alias = node->same_body;
1653 alias && alias->next; alias = alias->next)
1654 ;
1655 /* Walk aliases in the order they were created; it is possible that
1656 thunks reffers to the aliases made earlier. */
1657 for (; alias; alias = next)
1658 {
1659 next = alias->previous;
1660 if (!alias->thunk.thunk_p)
1661 assemble_alias (alias->decl,
1662 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1663 else
1664 assemble_thunk (alias);
1665 }
b2583345
JJ
1666 node->alias = saved_alias;
1667 }
39ecc018
JH
1668 gcc_assert (!cgraph_preserve_function_body_p (decl));
1669 cgraph_release_function_body (node);
1670 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1671 points to the dead function body. */
1672 cgraph_node_remove_callees (node);
6b02a499
JH
1673
1674 cgraph_function_flags_ready = true;
1c4a429a
JH
1675}
1676
18c6ada9 1677/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
b58b1157
JH
1678
1679bool
61a05df1 1680cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
b58b1157 1681{
18c6ada9
JH
1682 *reason = e->inline_failed;
1683 return !e->inline_failed;
b58b1157 1684}
18c6ada9 1685
6674a6ce 1686
6674a6ce 1687
db0e878d
AJ
1688/* Expand all functions that must be output.
1689
b58b1157
JH
1690 Attempt to topologically sort the nodes so function is output when
1691 all called functions are already assembled to allow data to be
a98ebe2e 1692 propagated across the callgraph. Use a stack to get smaller distance
d1a6adeb 1693 between a function and its callees (later we may choose to use a more
b58b1157
JH
1694 sophisticated algorithm for function reordering; we will likely want
1695 to use subsections to make the output functions appear in top-down
1696 order). */
1697
1698static void
a20af5b8 1699cgraph_expand_all_functions (void)
b58b1157
JH
1700{
1701 struct cgraph_node *node;
5ed6ace5 1702 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
f30cfcb1 1703 int order_pos, new_order_pos = 0;
b58b1157
JH
1704 int i;
1705
b58b1157 1706 order_pos = cgraph_postorder (order);
341c100f 1707 gcc_assert (order_pos == cgraph_n_nodes);
b58b1157 1708
1ae58c30 1709 /* Garbage collector may remove inline clones we eliminate during
18c6ada9
JH
1710 optimization. So we must be sure to not reference them. */
1711 for (i = 0; i < order_pos; i++)
257eb6e3 1712 if (order[i]->process)
18c6ada9
JH
1713 order[new_order_pos++] = order[i];
1714
1715 for (i = new_order_pos - 1; i >= 0; i--)
b58b1157
JH
1716 {
1717 node = order[i];
257eb6e3 1718 if (node->process)
b58b1157 1719 {
341c100f 1720 gcc_assert (node->reachable);
257eb6e3 1721 node->process = 0;
b58b1157
JH
1722 cgraph_expand_function (node);
1723 }
1724 }
f45e0ad1 1725 cgraph_process_new_functions ();
50674e96 1726
b58b1157 1727 free (order);
50674e96 1728
b58b1157
JH
1729}
1730
474eccc6
ILT
1731/* This is used to sort the node types by the cgraph order number. */
1732
24b97832
ILT
1733enum cgraph_order_sort_kind
1734{
1735 ORDER_UNDEFINED = 0,
1736 ORDER_FUNCTION,
1737 ORDER_VAR,
1738 ORDER_ASM
1739};
1740
474eccc6
ILT
1741struct cgraph_order_sort
1742{
24b97832 1743 enum cgraph_order_sort_kind kind;
474eccc6
ILT
1744 union
1745 {
1746 struct cgraph_node *f;
8a4a83ed 1747 struct varpool_node *v;
474eccc6
ILT
1748 struct cgraph_asm_node *a;
1749 } u;
1750};
1751
1752/* Output all functions, variables, and asm statements in the order
1753 according to their order fields, which is the order in which they
1754 appeared in the file. This implements -fno-toplevel-reorder. In
1755 this mode we may output functions and variables which don't really
1756 need to be output. */
1757
1758static void
1759cgraph_output_in_order (void)
1760{
1761 int max;
474eccc6
ILT
1762 struct cgraph_order_sort *nodes;
1763 int i;
1764 struct cgraph_node *pf;
8a4a83ed 1765 struct varpool_node *pv;
474eccc6
ILT
1766 struct cgraph_asm_node *pa;
1767
1768 max = cgraph_order;
33283dad 1769 nodes = XCNEWVEC (struct cgraph_order_sort, max);
474eccc6 1770
8a4a83ed 1771 varpool_analyze_pending_decls ();
474eccc6
ILT
1772
1773 for (pf = cgraph_nodes; pf; pf = pf->next)
1774 {
257eb6e3 1775 if (pf->process)
474eccc6
ILT
1776 {
1777 i = pf->order;
1778 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1779 nodes[i].kind = ORDER_FUNCTION;
1780 nodes[i].u.f = pf;
1781 }
1782 }
1783
8a4a83ed 1784 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
474eccc6
ILT
1785 {
1786 i = pv->order;
1787 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1788 nodes[i].kind = ORDER_VAR;
1789 nodes[i].u.v = pv;
1790 }
1791
1792 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1793 {
1794 i = pa->order;
1795 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1796 nodes[i].kind = ORDER_ASM;
1797 nodes[i].u.a = pa;
1798 }
474eccc6 1799
7386e3ee
JH
1800 /* In toplevel reorder mode we output all statics; mark them as needed. */
1801 for (i = 0; i < max; ++i)
1802 {
1803 if (nodes[i].kind == ORDER_VAR)
1804 {
1805 varpool_mark_needed_node (nodes[i].u.v);
1806 }
1807 }
1808 varpool_empty_needed_queue ();
1809
474eccc6
ILT
1810 for (i = 0; i < max; ++i)
1811 {
1812 switch (nodes[i].kind)
1813 {
1814 case ORDER_FUNCTION:
257eb6e3 1815 nodes[i].u.f->process = 0;
474eccc6
ILT
1816 cgraph_expand_function (nodes[i].u.f);
1817 break;
1818
1819 case ORDER_VAR:
8a4a83ed 1820 varpool_assemble_decl (nodes[i].u.v);
474eccc6
ILT
1821 break;
1822
1823 case ORDER_ASM:
1824 assemble_asm (nodes[i].u.a->asm_str);
1825 break;
1826
1827 case ORDER_UNDEFINED:
1828 break;
1829
1830 default:
1831 gcc_unreachable ();
1832 }
1833 }
e7b9eb2c
ILT
1834
1835 cgraph_asm_nodes = NULL;
33283dad 1836 free (nodes);
474eccc6
ILT
1837}
1838
18c6ada9
JH
1839/* Return true when function body of DECL still needs to be kept around
1840 for later re-use. */
1841bool
1842cgraph_preserve_function_body_p (tree decl)
1843{
1844 struct cgraph_node *node;
c37f4ba4
JH
1845
1846 gcc_assert (cgraph_global_info_ready);
18c6ada9 1847 /* Look if there is any clone around. */
9187e02d
JH
1848 node = cgraph_node (decl);
1849 if (node->clones)
1850 return true;
18c6ada9
JH
1851 return false;
1852}
1853
ef330312
PB
1854static void
1855ipa_passes (void)
1856{
db2960f4 1857 set_cfun (NULL);
04b201a2 1858 current_function_decl = NULL;
726a989a 1859 gimple_register_cfg_hooks ();
ef330312 1860 bitmap_obstack_initialize (NULL);
b20996ff 1861
090fa0ab
GF
1862 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1863
b20996ff
JH
1864 if (!in_lto_p)
1865 execute_ipa_pass_list (all_small_ipa_passes);
3baf459d 1866
d7f09764
DN
1867 /* If pass_all_early_optimizations was not scheduled, the state of
1868 the cgraph will not be properly updated. Update it now. */
1869 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1870 cgraph_state = CGRAPH_STATE_IPA_SSA;
3baf459d 1871
d7f09764
DN
1872 if (!in_lto_p)
1873 {
1874 /* Generate coverage variables and constructors. */
1875 coverage_finish ();
1876
1877 /* Process new functions added. */
1878 set_cfun (NULL);
1879 current_function_decl = NULL;
1880 cgraph_process_new_functions ();
d7f09764 1881
090fa0ab
GF
1882 execute_ipa_summary_passes
1883 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
fb3f88cc 1884 }
c082f9f3
SB
1885
1886 /* Some targets need to handle LTO assembler output specially. */
1887 if (flag_generate_lto)
1888 targetm.asm_out.lto_start ();
1889
d7f09764
DN
1890 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1891
1892 if (!in_lto_p)
1893 ipa_write_summaries ();
1894
c082f9f3
SB
1895 if (flag_generate_lto)
1896 targetm.asm_out.lto_end ();
1897
fb3f88cc
JH
1898 if (!flag_ltrans)
1899 execute_ipa_pass_list (all_regular_ipa_passes);
090fa0ab 1900 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
3baf459d 1901
ef330312
PB
1902 bitmap_obstack_release (NULL);
1903}
1904
4537ec0c 1905
1c4a429a
JH
1906/* Perform simple optimizations based on callgraph. */
1907
d7f09764 1908void
db0e878d 1909cgraph_optimize (void)
1c4a429a 1910{
1da2ed5f 1911 if (seen_error ())
413803d3
VR
1912 return;
1913
18c6ada9
JH
1914#ifdef ENABLE_CHECKING
1915 verify_cgraph ();
1916#endif
7be82279 1917
cd9c7bd2
JH
1918 /* Frontend may output common variables after the unit has been finalized.
1919 It is safe to deal with them here as they are always zero initialized. */
8a4a83ed 1920 varpool_analyze_pending_decls ();
857e7259 1921
a194aa56 1922 timevar_push (TV_CGRAPHOPT);
a5573239
JH
1923 if (pre_ipa_mem_report)
1924 {
1925 fprintf (stderr, "Memory consumption before IPA\n");
1926 dump_memory_report (false);
1927 }
b58b1157 1928 if (!quiet_flag)
a418679d 1929 fprintf (stderr, "Performing interprocedural optimizations\n");
f45e0ad1 1930 cgraph_state = CGRAPH_STATE_IPA;
f30cfcb1 1931
7e2fe9d8 1932 /* Don't run the IPA passes if there was any error or sorry messages. */
1da2ed5f 1933 if (!seen_error ())
7e2fe9d8
AP
1934 ipa_passes ();
1935
4537ec0c 1936 /* Do nothing else if any IPA pass found errors. */
1da2ed5f 1937 if (seen_error ())
9ba0399e
RH
1938 {
1939 timevar_pop (TV_CGRAPHOPT);
1940 return;
1941 }
4537ec0c 1942
6b02a499
JH
1943 /* This pass remove bodies of extern inline functions we never inlined.
1944 Do this later so other IPA passes see what is really going on. */
1945 cgraph_remove_unreachable_nodes (false, dump_file);
dafc5b82 1946 cgraph_global_info_ready = true;
a194aa56
JH
1947 if (cgraph_dump_file)
1948 {
7d82fe7c 1949 fprintf (cgraph_dump_file, "Optimized ");
a194aa56 1950 dump_cgraph (cgraph_dump_file);
cd9c7bd2 1951 dump_varpool (cgraph_dump_file);
a194aa56 1952 }
a5573239
JH
1953 if (post_ipa_mem_report)
1954 {
7fa982e5 1955 fprintf (stderr, "Memory consumption after IPA\n");
a5573239
JH
1956 dump_memory_report (false);
1957 }
a194aa56 1958 timevar_pop (TV_CGRAPHOPT);
1c4a429a 1959
b58b1157 1960 /* Output everything. */
3df9609a 1961 (*debug_hooks->assembly_start) ();
7d82fe7c
KC
1962 if (!quiet_flag)
1963 fprintf (stderr, "Assembling functions:\n");
18c6ada9
JH
1964#ifdef ENABLE_CHECKING
1965 verify_cgraph ();
1966#endif
474eccc6 1967
9187e02d 1968 cgraph_materialize_all_clones ();
6674a6ce 1969 cgraph_mark_functions_to_output ();
cd9c7bd2 1970
f45e0ad1 1971 cgraph_state = CGRAPH_STATE_EXPANSION;
474eccc6
ILT
1972 if (!flag_toplevel_reorder)
1973 cgraph_output_in_order ();
1974 else
1975 {
1976 cgraph_output_pending_asms ();
1977
1978 cgraph_expand_all_functions ();
8a4a83ed 1979 varpool_remove_unreferenced_decls ();
474eccc6 1980
8a4a83ed 1981 varpool_assemble_pending_decls ();
474eccc6 1982 }
f45e0ad1
JH
1983 cgraph_process_new_functions ();
1984 cgraph_state = CGRAPH_STATE_FINISHED;
cd9c7bd2 1985
a194aa56
JH
1986 if (cgraph_dump_file)
1987 {
7d82fe7c 1988 fprintf (cgraph_dump_file, "\nFinal ");
a194aa56
JH
1989 dump_cgraph (cgraph_dump_file);
1990 }
18c6ada9
JH
1991#ifdef ENABLE_CHECKING
1992 verify_cgraph ();
6de9cd9a
DN
1993 /* Double check that all inline clones are gone and that all
1994 function bodies have been released from memory. */
1da2ed5f 1995 if (!seen_error ())
6de9cd9a
DN
1996 {
1997 struct cgraph_node *node;
1998 bool error_found = false;
1999
2000 for (node = cgraph_nodes; node; node = node->next)
2001 if (node->analyzed
2002 && (node->global.inlined_to
39ecc018 2003 || gimple_has_body_p (node->decl)))
6de9cd9a
DN
2004 {
2005 error_found = true;
2006 dump_cgraph_node (stderr, node);
c22cacf3 2007 }
6de9cd9a 2008 if (error_found)
f30cfcb1 2009 internal_error ("nodes with unreleased memory found");
6de9cd9a 2010 }
18c6ada9 2011#endif
1c4a429a 2012}
4537ec0c
DN
2013
2014
873c7164
MM
2015/* Generate and emit a static constructor or destructor. WHICH must
2016 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
2017 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
fa10beec 2018 initialization priority for this constructor or destructor. */
c9b9aa64
RH
2019
2020void
35b6fdcf 2021cgraph_build_static_cdtor (char which, tree body, int priority)
c9b9aa64
RH
2022{
2023 static int counter = 0;
2024 char which_buf[16];
b785f485 2025 tree decl, name, resdecl;
c9b9aa64 2026
873c7164
MM
2027 /* The priority is encoded in the constructor or destructor name.
2028 collect2 will sort the names and arrange that they are called at
2029 program startup. */
2030 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
5880f14f 2031 name = get_file_function_name (which_buf);
c9b9aa64 2032
c2255bc4 2033 decl = build_decl (input_location, FUNCTION_DECL, name,
6a4825bd 2034 build_function_type_list (void_type_node, NULL_TREE));
c9b9aa64
RH
2035 current_function_decl = decl;
2036
c2255bc4
AH
2037 resdecl = build_decl (input_location,
2038 RESULT_DECL, NULL_TREE, void_type_node);
b785f485 2039 DECL_ARTIFICIAL (resdecl) = 1;
b785f485 2040 DECL_RESULT (decl) = resdecl;
07485407 2041 DECL_CONTEXT (resdecl) = decl;
b785f485 2042
182e0d71 2043 allocate_struct_function (decl, false);
c9b9aa64
RH
2044
2045 TREE_STATIC (decl) = 1;
2046 TREE_USED (decl) = 1;
2047 DECL_ARTIFICIAL (decl) = 1;
c9b9aa64
RH
2048 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
2049 DECL_SAVED_TREE (decl) = body;
b932b8b1
JDA
2050 if (!targetm.have_ctors_dtors)
2051 {
2052 TREE_PUBLIC (decl) = 1;
2053 DECL_PRESERVE_P (decl) = 1;
2054 }
c9b9aa64
RH
2055 DECL_UNINLINABLE (decl) = 1;
2056
2057 DECL_INITIAL (decl) = make_node (BLOCK);
2058 TREE_USED (DECL_INITIAL (decl)) = 1;
2059
2060 DECL_SOURCE_LOCATION (decl) = input_location;
2061 cfun->function_end_locus = input_location;
2062
341c100f
NS
2063 switch (which)
2064 {
2065 case 'I':
2066 DECL_STATIC_CONSTRUCTOR (decl) = 1;
395a40e0 2067 decl_init_priority_insert (decl, priority);
341c100f
NS
2068 break;
2069 case 'D':
2070 DECL_STATIC_DESTRUCTOR (decl) = 1;
395a40e0 2071 decl_fini_priority_insert (decl, priority);
341c100f
NS
2072 break;
2073 default:
2074 gcc_unreachable ();
2075 }
c9b9aa64
RH
2076
2077 gimplify_function_tree (decl);
2078
f45e0ad1
JH
2079 cgraph_add_new_function (decl, false);
2080 cgraph_mark_needed_node (cgraph_node (decl));
c5f77dcb 2081
cac67c08 2082 set_cfun (NULL);
c5f77dcb 2083 current_function_decl = NULL;
c9b9aa64 2084}
9b3e897d
PB
2085
2086void
2087init_cgraph (void)
2088{
a05541a9
JH
2089 if (!cgraph_dump_file)
2090 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
9b3e897d 2091}
57fb5341 2092
c22cacf3 2093/* The edges representing the callers of the NEW_VERSION node were
57fb5341
RL
2094 fixed by cgraph_function_versioning (), now the call_expr in their
2095 respective tree code should be updated to call the NEW_VERSION. */
2096
2097static void
2098update_call_expr (struct cgraph_node *new_version)
2099{
2100 struct cgraph_edge *e;
2101
2102 gcc_assert (new_version);
726a989a
RB
2103
2104 /* Update the call expr on the edges to call the new version. */
57fb5341 2105 for (e = new_version->callers; e; e = e->next_caller)
c0ab1df3
AP
2106 {
2107 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2108 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1d65f45c 2109 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
c0ab1df3 2110 }
57fb5341
RL
2111}
2112
2113
2114/* Create a new cgraph node which is the new version of
2115 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2116 edges which should be redirected to point to
2117 NEW_VERSION. ALL the callees edges of OLD_VERSION
2118 are cloned to the new version node. Return the new
91382288
JH
2119 version node.
2120
2121 If non-NULL BLOCK_TO_COPY determine what basic blocks
2122 was copied to prevent duplications of calls that are dead
2123 in the clone. */
57fb5341
RL
2124
2125static struct cgraph_node *
2126cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
b2c0ad40 2127 tree new_decl,
91382288
JH
2128 VEC(cgraph_edge_p,heap) *redirect_callers,
2129 bitmap bbs_to_copy)
2130 {
57fb5341 2131 struct cgraph_node *new_version;
ae2b0888 2132 struct cgraph_edge *e;
57fb5341
RL
2133 unsigned i;
2134
2135 gcc_assert (old_version);
c22cacf3 2136
57fb5341
RL
2137 new_version = cgraph_node (new_decl);
2138
2139 new_version->analyzed = true;
2140 new_version->local = old_version->local;
036546e5
JH
2141 new_version->local.externally_visible = false;
2142 new_version->local.local = true;
2143 new_version->local.vtable_method = false;
57fb5341 2144 new_version->global = old_version->global;
8cf9feca 2145 new_version->rtl = old_version->rtl;
57fb5341
RL
2146 new_version->reachable = true;
2147 new_version->count = old_version->count;
2148
036546e5 2149 for (e = old_version->callees; e; e=e->next_callee)
91382288
JH
2150 if (!bbs_to_copy
2151 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2152 cgraph_clone_edge (e, new_version, e->call_stmt,
2153 e->lto_stmt_uid, REG_BR_PROB_BASE,
2154 CGRAPH_FREQ_BASE,
2155 e->loop_nest, true);
036546e5 2156 for (e = old_version->indirect_calls; e; e=e->next_callee)
91382288
JH
2157 if (!bbs_to_copy
2158 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2159 cgraph_clone_edge (e, new_version, e->call_stmt,
2160 e->lto_stmt_uid, REG_BR_PROB_BASE,
2161 CGRAPH_FREQ_BASE,
2162 e->loop_nest, true);
b2c0ad40
KH
2163 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2164 {
2165 /* Redirect calls to the old version node to point to its new
2166 version. */
2167 cgraph_redirect_edge_callee (e, new_version);
2168 }
57fb5341
RL
2169
2170 return new_version;
2171 }
2172
2173 /* Perform function versioning.
c22cacf3 2174 Function versioning includes copying of the tree and
57fb5341
RL
2175 a callgraph update (creating a new cgraph node and updating
2176 its callees and callers).
2177
2178 REDIRECT_CALLERS varray includes the edges to be redirected
2179 to the new version.
2180
2181 TREE_MAP is a mapping of tree nodes we want to replace with
2182 new ones (according to results of prior analysis).
2183 OLD_VERSION_NODE is the node that is versioned.
b8698a0f 2184 It returns the new version's cgraph node.
91382288
JH
2185 If non-NULL ARGS_TO_SKIP determine function parameters to remove
2186 from new version.
2187 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
2188 If non_NULL NEW_ENTRY determine new entry BB of the clone. */
57fb5341
RL
2189
2190struct cgraph_node *
2191cgraph_function_versioning (struct cgraph_node *old_version_node,
b2c0ad40 2192 VEC(cgraph_edge_p,heap) *redirect_callers,
9187e02d 2193 VEC (ipa_replace_map_p,gc)* tree_map,
036546e5 2194 bitmap args_to_skip,
91382288
JH
2195 bitmap bbs_to_copy,
2196 basic_block new_entry_block,
036546e5 2197 const char *clone_name)
57fb5341
RL
2198{
2199 tree old_decl = old_version_node->decl;
2200 struct cgraph_node *new_version_node = NULL;
2201 tree new_decl;
2202
2203 if (!tree_versionable_function_p (old_decl))
2204 return NULL;
2205
2206 /* Make a new FUNCTION_DECL tree node for the
2207 new version. */
c6f7cfc1
JH
2208 if (!args_to_skip)
2209 new_decl = copy_node (old_decl);
2210 else
2211 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
57fb5341 2212
9990e02a
JH
2213 /* Generate a new name for the new version. */
2214 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2215 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2216 SET_DECL_RTL (new_decl, NULL);
2217
57fb5341
RL
2218 /* Create the new version's call-graph node.
2219 and update the edges of the new node. */
2220 new_version_node =
2221 cgraph_copy_node_for_versioning (old_version_node, new_decl,
91382288 2222 redirect_callers, bbs_to_copy);
57fb5341
RL
2223
2224 /* Copy the OLD_VERSION_NODE function tree to the new version. */
91382288
JH
2225 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2226 bbs_to_copy, new_entry_block);
57fb5341 2227
c22cacf3 2228 /* Update the new version's properties.
c0ab1df3
AP
2229 Make The new version visible only within this translation unit. Make sure
2230 that is not weak also.
c22cacf3 2231 ??? We cannot use COMDAT linkage because there is no
57fb5341 2232 ABI support for this. */
715a4e08 2233 cgraph_make_decl_local (new_version_node->decl);
e6e1c050 2234 DECL_VIRTUAL_P (new_version_node->decl) = 0;
57fb5341
RL
2235 new_version_node->local.externally_visible = 0;
2236 new_version_node->local.local = 1;
2237 new_version_node->lowered = true;
e6e1c050 2238
c0ab1df3
AP
2239 /* Update the call_expr on the edges to call the new version node. */
2240 update_call_expr (new_version_node);
b8698a0f 2241
129a37fc 2242 cgraph_call_function_insertion_hooks (new_version_node);
57fb5341
RL
2243 return new_version_node;
2244}
ea99e0be
JH
2245
2246/* Produce separate function body for inline clones so the offline copy can be
2247 modified without affecting them. */
2248struct cgraph_node *
2249save_inline_function_body (struct cgraph_node *node)
2250{
9187e02d 2251 struct cgraph_node *first_clone, *n;
ea99e0be
JH
2252
2253 gcc_assert (node == cgraph_node (node->decl));
2254
2255 cgraph_lower_function (node);
2256
9187e02d 2257 first_clone = node->clones;
ea99e0be
JH
2258
2259 first_clone->decl = copy_node (node->decl);
ea99e0be
JH
2260 cgraph_insert_node_to_hashtable (first_clone);
2261 gcc_assert (first_clone == cgraph_node (first_clone->decl));
9187e02d
JH
2262 if (first_clone->next_sibling_clone)
2263 {
2264 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2265 n->clone_of = first_clone;
2266 n->clone_of = first_clone;
2267 n->next_sibling_clone = first_clone->clones;
2268 if (first_clone->clones)
2269 first_clone->clones->prev_sibling_clone = n;
2270 first_clone->clones = first_clone->next_sibling_clone;
2271 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2272 first_clone->next_sibling_clone = NULL;
2273 gcc_assert (!first_clone->prev_sibling_clone);
2274 }
2275 first_clone->clone_of = NULL;
2276 node->clones = NULL;
2277
2278 if (first_clone->clones)
2279 for (n = first_clone->clones; n != first_clone;)
2280 {
2281 gcc_assert (n->decl == node->decl);
2282 n->decl = first_clone->decl;
2283 if (n->clones)
2284 n = n->clones;
2285 else if (n->next_sibling_clone)
2286 n = n->next_sibling_clone;
2287 else
2288 {
2289 while (n != first_clone && !n->next_sibling_clone)
2290 n = n->clone_of;
2291 if (n != first_clone)
2292 n = n->next_sibling_clone;
2293 }
2294 }
ea99e0be
JH
2295
2296 /* Copy the OLD_VERSION_NODE function tree to the new version. */
91382288
JH
2297 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2298 NULL, NULL);
ea99e0be
JH
2299
2300 DECL_EXTERNAL (first_clone->decl) = 0;
fc26fae3 2301 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
ea99e0be
JH
2302 TREE_PUBLIC (first_clone->decl) = 0;
2303 DECL_COMDAT (first_clone->decl) = 0;
21ecdec5 2304 VEC_free (ipa_opt_pass, heap,
0e3776db
JH
2305 first_clone->ipa_transforms_to_apply);
2306 first_clone->ipa_transforms_to_apply = NULL;
ea99e0be 2307
ea99e0be
JH
2308#ifdef ENABLE_CHECKING
2309 verify_cgraph_node (first_clone);
2310#endif
2311 return first_clone;
2312}
7be82279 2313
9187e02d
JH
2314/* Given virtual clone, turn it into actual clone. */
2315static void
2316cgraph_materialize_clone (struct cgraph_node *node)
2317{
2318 bitmap_obstack_initialize (NULL);
e466e2ce
JH
2319#ifdef ENABLE_CHECKING
2320 node->former_clone_of = node->clone_of->decl;
2321 if (node->clone_of->former_clone_of)
2322 node->former_clone_of = node->clone_of->former_clone_of;
2323#endif
9187e02d
JH
2324 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2325 tree_function_versioning (node->clone_of->decl, node->decl,
2326 node->clone.tree_map, true,
91382288 2327 node->clone.args_to_skip, NULL, NULL);
08ad1d6d
JH
2328 if (cgraph_dump_file)
2329 {
2330 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2331 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2332 }
9187e02d
JH
2333
2334 /* Function is no longer clone. */
2335 if (node->next_sibling_clone)
2336 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2337 if (node->prev_sibling_clone)
2338 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2339 else
2340 node->clone_of->clones = node->next_sibling_clone;
2341 node->next_sibling_clone = NULL;
2342 node->prev_sibling_clone = NULL;
0e3776db 2343 if (!node->clone_of->analyzed && !node->clone_of->clones)
f0c418dc
JH
2344 {
2345 cgraph_release_function_body (node->clone_of);
2346 cgraph_node_remove_callees (node->clone_of);
2347 ipa_remove_all_references (&node->clone_of->ref_list);
2348 }
9187e02d
JH
2349 node->clone_of = NULL;
2350 bitmap_obstack_release (NULL);
2351}
2352
8132a837
MJ
2353/* If necessary, change the function declaration in the call statement
2354 associated with E so that it corresponds to the edge callee. */
2355
2356gimple
2357cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2358{
2359 tree decl = gimple_call_fndecl (e->call_stmt);
2360 gimple new_stmt;
437ffe7b
JH
2361#ifdef ENABLE_CHECKING
2362 struct cgraph_node *node;
2363#endif
8132a837 2364
3949c4a7
MJ
2365 if (e->indirect_unknown_callee
2366 || decl == e->callee->decl
8132a837 2367 /* Don't update call from same body alias to the real function. */
3949c4a7 2368 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
8132a837
MJ
2369 return e->call_stmt;
2370
437ffe7b 2371#ifdef ENABLE_CHECKING
3949c4a7
MJ
2372 if (decl)
2373 {
2374 node = cgraph_get_node (decl);
2375 gcc_assert (!node || !node->clone.combined_args_to_skip);
2376 }
437ffe7b 2377#endif
e466e2ce 2378
8132a837
MJ
2379 if (cgraph_dump_file)
2380 {
2381 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2382 cgraph_node_name (e->caller), e->caller->uid,
2383 cgraph_node_name (e->callee), e->callee->uid);
2384 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e466e2ce 2385 if (e->callee->clone.combined_args_to_skip)
8d2adc24
EB
2386 {
2387 fprintf (cgraph_dump_file, " combined args to skip: ");
2388 dump_bitmap (cgraph_dump_file,
2389 e->callee->clone.combined_args_to_skip);
e466e2ce 2390 }
8132a837
MJ
2391 }
2392
2393 if (e->callee->clone.combined_args_to_skip)
8d2adc24
EB
2394 {
2395 gimple_stmt_iterator gsi;
2396
2397 new_stmt
2398 = gimple_call_copy_skip_args (e->call_stmt,
2399 e->callee->clone.combined_args_to_skip);
2400
2401 if (gimple_vdef (new_stmt)
2402 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2403 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2404
2405 gsi = gsi_for_stmt (e->call_stmt);
2406 gsi_replace (&gsi, new_stmt, true);
2407 }
8132a837
MJ
2408 else
2409 new_stmt = e->call_stmt;
8132a837 2410
8d2adc24 2411 gimple_call_set_fndecl (new_stmt, e->callee->decl);
0b6e2868 2412 update_stmt (new_stmt);
8132a837 2413
8132a837
MJ
2414 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2415
2416 if (cgraph_dump_file)
2417 {
2418 fprintf (cgraph_dump_file, " updated to:");
2419 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2420 }
2421 return new_stmt;
2422}
2423
9187e02d 2424/* Once all functions from compilation unit are in memory, produce all clones
8132a837
MJ
2425 and update all calls. We might also do this on demand if we don't want to
2426 bring all functions to memory prior compilation, but current WHOPR
2427 implementation does that and it is is bit easier to keep everything right in
2428 this order. */
9187e02d
JH
2429void
2430cgraph_materialize_all_clones (void)
2431{
2432 struct cgraph_node *node;
2433 bool stabilized = false;
2434
2435 if (cgraph_dump_file)
2436 fprintf (cgraph_dump_file, "Materializing clones\n");
2437#ifdef ENABLE_CHECKING
2438 verify_cgraph ();
2439#endif
2440
2441 /* We can also do topological order, but number of iterations should be
2442 bounded by number of IPA passes since single IPA pass is probably not
2443 going to create clones of clones it created itself. */
2444 while (!stabilized)
2445 {
2446 stabilized = true;
2447 for (node = cgraph_nodes; node; node = node->next)
2448 {
2449 if (node->clone_of && node->decl != node->clone_of->decl
2450 && !gimple_has_body_p (node->decl))
2451 {
2452 if (gimple_has_body_p (node->clone_of->decl))
2453 {
2454 if (cgraph_dump_file)
08ad1d6d
JH
2455 {
2456 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2457 cgraph_node_name (node->clone_of),
2458 cgraph_node_name (node));
2459 if (node->clone.tree_map)
2460 {
2461 unsigned int i;
2462 fprintf (cgraph_dump_file, " replace map: ");
2463 for (i = 0; i < VEC_length (ipa_replace_map_p,
2464 node->clone.tree_map);
2465 i++)
2466 {
2467 struct ipa_replace_map *replace_info;
2468 replace_info = VEC_index (ipa_replace_map_p,
2469 node->clone.tree_map,
2470 i);
2471 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2472 fprintf (cgraph_dump_file, " -> ");
2473 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2474 fprintf (cgraph_dump_file, "%s%s;",
2475 replace_info->replace_p ? "(replace)":"",
2476 replace_info->ref_p ? "(ref)":"");
2477 }
2478 fprintf (cgraph_dump_file, "\n");
2479 }
2480 if (node->clone.args_to_skip)
2481 {
2482 fprintf (cgraph_dump_file, " args_to_skip: ");
2483 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2484 }
2485 if (node->clone.args_to_skip)
2486 {
2487 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2488 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2489 }
2490 }
9187e02d 2491 cgraph_materialize_clone (node);
36576655 2492 stabilized = false;
9187e02d 2493 }
9187e02d
JH
2494 }
2495 }
2496 }
47cb0d7d
JH
2497 for (node = cgraph_nodes; node; node = node->next)
2498 if (!node->analyzed && node->callees)
2499 cgraph_node_remove_callees (node);
8132a837
MJ
2500 if (cgraph_dump_file)
2501 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
9a23acef
JH
2502#ifdef ENABLE_CHECKING
2503 verify_cgraph ();
2504#endif
9187e02d
JH
2505 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2506}
2507
7be82279 2508#include "gt-cgraphunit.h"