]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
re PR middle-end/42834 (memcpy folding overeager)
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
a418679d 1/* Callgraph based interprocedural optimizations.
566f27e4 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
cac67c08 3 Free Software Foundation, Inc.
1c4a429a
JH
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1c4a429a
JH
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
1c4a429a 21
18c6ada9 22/* This module implements main driver of compilation process as well as
a418679d 23 few basic interprocedural optimizers.
18c6ada9
JH
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
efe75b6f
JH
35 (There is one exception needed for implementing GCC extern inline
36 function.)
18c6ada9 37
8a4a83ed 38 - varpool_finalize_variable
18c6ada9 39
1ae58c30 40 This function has same behavior as the above but is used for static
18c6ada9
JH
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
efe75b6f
JH
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
18c6ada9 47
7e8b322a 48 In the the call-graph construction and local function
18c6ada9
JH
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
efe75b6f
JH
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
18c6ada9
JH
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
18c6ada9 62 - cgraph_mark_needed_node
8a4a83ed 63 - varpool_mark_needed_node
18c6ada9 64
efe75b6f
JH
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
dbb23ff7 69 used by C++ frontend to explicitly mark the keyed methods.
18c6ada9
JH
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
7e8b322a 80 Analyzing of all functions is deferred
18c6ada9
JH
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
e1990f69 91 The intra-procedural information is produced and its existence
18c6ada9
JH
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that has not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
7e8b322a 106 where reference has been optimized out. */
9b3e897d 107
6674a6ce 108
1c4a429a
JH
109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
c9b9aa64 114#include "rtl.h"
6674a6ce 115#include "tree-flow.h"
1c4a429a
JH
116#include "tree-inline.h"
117#include "langhooks.h"
0c58f841 118#include "pointer-set.h"
1c4a429a
JH
119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
dafc5b82 125#include "diagnostic.h"
cf835838
JM
126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
a194aa56 128#include "timevar.h"
b58b1157
JH
129#include "params.h"
130#include "fibheap.h"
dc0bfe6a 131#include "intl.h"
902edd36 132#include "function.h"
57fb5341 133#include "ipa-prop.h"
726a989a
RB
134#include "gimple.h"
135#include "tree-iterator.h"
b4861090 136#include "tree-pass.h"
a406865a 137#include "tree-dump.h"
cd9c7bd2 138#include "output.h"
3baf459d 139#include "coverage.h"
090fa0ab 140#include "plugin.h"
b58b1157 141
a20af5b8 142static void cgraph_expand_all_functions (void);
db0e878d
AJ
143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
21c4a6a7 145static void cgraph_output_pending_asms (void);
a406865a 146static void cgraph_analyze_function (struct cgraph_node *);
7dff32e6 147
0a5fa5a1 148FILE *cgraph_dump_file;
9b3e897d 149
873c7164
MM
150/* A vector of FUNCTION_DECLs declared as static constructors. */
151static GTY (()) VEC(tree, gc) *static_ctors;
152/* A vector of FUNCTION_DECLs declared as static destructors. */
153static GTY (()) VEC(tree, gc) *static_dtors;
7be82279 154
6744a6ab
JH
155/* Used for vtable lookup in thunk adjusting. */
156static GTY (()) tree vtable_entry_type;
157
7be82279 158/* When target does not have ctors and dtors, we call all constructor
c80b4100 159 and destructor by special initialization/destruction function
b8698a0f
L
160 recognized by collect2.
161
7be82279
JH
162 When we are going to build this function, collect all constructors and
163 destructors and turn them into normal functions. */
164
165static void
166record_cdtor_fn (tree fndecl)
167{
bd85fcee
JH
168 struct cgraph_node *node;
169 if (targetm.have_ctors_dtors
170 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
171 && !DECL_STATIC_DESTRUCTOR (fndecl)))
7be82279
JH
172 return;
173
174 if (DECL_STATIC_CONSTRUCTOR (fndecl))
175 {
873c7164 176 VEC_safe_push (tree, gc, static_ctors, fndecl);
7be82279 177 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
7be82279
JH
178 }
179 if (DECL_STATIC_DESTRUCTOR (fndecl))
180 {
873c7164 181 VEC_safe_push (tree, gc, static_dtors, fndecl);
7be82279 182 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
7be82279 183 }
bd85fcee
JH
184 node = cgraph_node (fndecl);
185 node->local.disregard_inline_limits = 1;
186 cgraph_mark_reachable_node (node);
7be82279
JH
187}
188
873c7164
MM
189/* Define global constructors/destructor functions for the CDTORS, of
190 which they are LEN. The CDTORS are sorted by initialization
191 priority. If CTOR_P is true, these are constructors; otherwise,
192 they are destructors. */
193
7be82279 194static void
873c7164 195build_cdtor (bool ctor_p, tree *cdtors, size_t len)
7be82279 196{
873c7164 197 size_t i;
7be82279 198
873c7164
MM
199 i = 0;
200 while (i < len)
201 {
202 tree body;
203 tree fn;
204 priority_type priority;
205
206 priority = 0;
207 body = NULL_TREE;
208 /* Find the next batch of constructors/destructors with the same
209 initialization priority. */
210 do
211 {
212 priority_type p;
213 fn = cdtors[i];
214 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
215 if (!body)
216 priority = p;
217 else if (p != priority)
218 break;
db3927fb
AH
219 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
220 fn, 0),
873c7164
MM
221 &body);
222 ++i;
223 }
224 while (i < len);
225 gcc_assert (body != NULL_TREE);
226 /* Generate a function to call all the function of like
227 priority. */
228 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
229 }
230}
231
232/* Comparison function for qsort. P1 and P2 are actually of type
233 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
234 used to determine the sort order. */
7be82279 235
873c7164
MM
236static int
237compare_ctor (const void *p1, const void *p2)
238{
239 tree f1;
240 tree f2;
241 int priority1;
242 int priority2;
243
244 f1 = *(const tree *)p1;
245 f2 = *(const tree *)p2;
246 priority1 = DECL_INIT_PRIORITY (f1);
247 priority2 = DECL_INIT_PRIORITY (f2);
b8698a0f 248
873c7164
MM
249 if (priority1 < priority2)
250 return -1;
251 else if (priority1 > priority2)
252 return 1;
253 else
254 /* Ensure a stable sort. */
255 return (const tree *)p1 - (const tree *)p2;
256}
257
258/* Comparison function for qsort. P1 and P2 are actually of type
259 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
260 used to determine the sort order. */
7be82279 261
873c7164
MM
262static int
263compare_dtor (const void *p1, const void *p2)
264{
265 tree f1;
266 tree f2;
267 int priority1;
268 int priority2;
269
270 f1 = *(const tree *)p1;
271 f2 = *(const tree *)p2;
272 priority1 = DECL_FINI_PRIORITY (f1);
273 priority2 = DECL_FINI_PRIORITY (f2);
b8698a0f 274
873c7164
MM
275 if (priority1 < priority2)
276 return -1;
277 else if (priority1 > priority2)
278 return 1;
279 else
280 /* Ensure a stable sort. */
281 return (const tree *)p1 - (const tree *)p2;
7be82279
JH
282}
283
284/* Generate functions to call static constructors and destructors
285 for targets that do not support .ctors/.dtors sections. These
286 functions have magic names which are detected by collect2. */
287
288static void
289cgraph_build_cdtor_fns (void)
290{
873c7164 291 if (!VEC_empty (tree, static_ctors))
7be82279 292 {
873c7164
MM
293 gcc_assert (!targetm.have_ctors_dtors);
294 qsort (VEC_address (tree, static_ctors),
b8698a0f 295 VEC_length (tree, static_ctors),
873c7164
MM
296 sizeof (tree),
297 compare_ctor);
298 build_cdtor (/*ctor_p=*/true,
299 VEC_address (tree, static_ctors),
b8698a0f 300 VEC_length (tree, static_ctors));
873c7164 301 VEC_truncate (tree, static_ctors, 0);
7be82279 302 }
873c7164
MM
303
304 if (!VEC_empty (tree, static_dtors))
7be82279 305 {
873c7164
MM
306 gcc_assert (!targetm.have_ctors_dtors);
307 qsort (VEC_address (tree, static_dtors),
b8698a0f 308 VEC_length (tree, static_dtors),
873c7164
MM
309 sizeof (tree),
310 compare_dtor);
311 build_cdtor (/*ctor_p=*/false,
312 VEC_address (tree, static_dtors),
b8698a0f 313 VEC_length (tree, static_dtors));
873c7164 314 VEC_truncate (tree, static_dtors, 0);
7be82279
JH
315 }
316}
317
8dafba3c
RH
318/* Determine if function DECL is needed. That is, visible to something
319 either outside this translation unit, something magic in the system
7e8b322a 320 configury. */
8dafba3c 321
d7f09764
DN
322bool
323cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
8dafba3c 324{
e7d6beb0 325 /* If the user told us it is used, then it must be so. */
386b46cf
JH
326 if (node->local.externally_visible)
327 return true;
328
e7d6beb0
JH
329 /* ??? If the assembler name is set by hand, it is possible to assemble
330 the name later after finalizing the function and the fact is noticed
331 in assemble_name then. This is arguably a bug. */
332 if (DECL_ASSEMBLER_NAME_SET_P (decl)
333 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
334 return true;
335
a1d31187
JH
336 /* With -fkeep-inline-functions we are keeping all inline functions except
337 for extern inline ones. */
338 if (flag_keep_inline_functions
339 && DECL_DECLARED_INLINE_P (decl)
b521dcbe
JH
340 && !DECL_EXTERNAL (decl)
341 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
a1d31187
JH
342 return true;
343
8dafba3c
RH
344 /* If we decided it was needed before, but at the time we didn't have
345 the body of the function available, then it's still needed. We have
346 to go back and re-check its dependencies now. */
347 if (node->needed)
348 return true;
349
350 /* Externally visible functions must be output. The exception is
c22cacf3 351 COMDAT functions that must be output only when they are needed.
04f77d0f
JH
352
353 When not optimizing, also output the static functions. (see
46f5f7f2 354 PR24561), but don't do so for always_inline functions, functions
c5d01958 355 declared inline and nested functions. These were optimized out
b633db7b 356 in the original implementation and it is unclear whether we want
6fc0bb99 357 to change the behavior here. */
5d342ef9 358 if (((TREE_PUBLIC (decl)
c5d01958
EB
359 || (!optimize
360 && !node->local.disregard_inline_limits
b633db7b 361 && !DECL_DECLARED_INLINE_P (decl)
c5d01958
EB
362 && !(DECL_CONTEXT (decl)
363 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
b20996ff
JH
364 && !flag_whole_program
365 && !flag_lto
366 && !flag_whopr)
ce91e74c 367 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
8dafba3c
RH
368 return true;
369
370 /* Constructors and destructors are reachable from the runtime by
371 some mechanism. */
372 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
373 return true;
374
8dafba3c
RH
375 return false;
376}
377
d60ab196 378/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
f45e0ad1
JH
379 functions into callgraph in a way so they look like ordinary reachable
380 functions inserted into callgraph already at construction time. */
381
382bool
383cgraph_process_new_functions (void)
384{
385 bool output = false;
386 tree fndecl;
387 struct cgraph_node *node;
388
2942c502 389 varpool_analyze_pending_decls ();
f45e0ad1
JH
390 /* Note that this queue may grow as its being processed, as the new
391 functions may generate new ones. */
392 while (cgraph_new_nodes)
393 {
394 node = cgraph_new_nodes;
395 fndecl = node->decl;
396 cgraph_new_nodes = cgraph_new_nodes->next_needed;
397 switch (cgraph_state)
398 {
399 case CGRAPH_STATE_CONSTRUCTION:
400 /* At construction time we just need to finalize function and move
401 it into reachable functions list. */
402
403 node->next_needed = NULL;
404 cgraph_finalize_function (fndecl, false);
405 cgraph_mark_reachable_node (node);
406 output = true;
407 break;
408
409 case CGRAPH_STATE_IPA:
7a388ee4 410 case CGRAPH_STATE_IPA_SSA:
f45e0ad1
JH
411 /* When IPA optimization already started, do all essential
412 transformations that has been already performed on the whole
413 cgraph but not on this function. */
414
726a989a 415 gimple_register_cfg_hooks ();
f45e0ad1
JH
416 if (!node->analyzed)
417 cgraph_analyze_function (node);
418 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
419 current_function_decl = fndecl;
1920df6c 420 compute_inline_parameters (node);
7a388ee4
JH
421 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
422 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
423 /* When not optimizing, be sure we run early local passes anyway
424 to expand OMP. */
425 || !optimize)
8ddbbcae 426 execute_pass_list (pass_early_local_passes.pass.sub);
f45e0ad1
JH
427 free_dominance_info (CDI_POST_DOMINATORS);
428 free_dominance_info (CDI_DOMINATORS);
429 pop_cfun ();
430 current_function_decl = NULL;
431 break;
432
433 case CGRAPH_STATE_EXPANSION:
434 /* Functions created during expansion shall be compiled
435 directly. */
257eb6e3 436 node->process = 0;
f45e0ad1
JH
437 cgraph_expand_function (node);
438 break;
439
440 default:
441 gcc_unreachable ();
442 break;
443 }
129a37fc 444 cgraph_call_function_insertion_hooks (node);
2942c502 445 varpool_analyze_pending_decls ();
f45e0ad1
JH
446 }
447 return output;
448}
449
d71cc23f
JH
450/* As an GCC extension we allow redefinition of the function. The
451 semantics when both copies of bodies differ is not well defined.
452 We replace the old body with new body so in unit at a time mode
453 we always use new body, while in normal mode we may end up with
454 old body inlined into some functions and new body expanded and
455 inlined in others.
456
457 ??? It may make more sense to use one body for inlining and other
458 body for expanding the function but this is difficult to do. */
459
460static void
461cgraph_reset_node (struct cgraph_node *node)
462{
257eb6e3 463 /* If node->process is set, then we have already begun whole-unit analysis.
7e8b322a
JH
464 This is *not* testing for whether we've already emitted the function.
465 That case can be sort-of legitimately seen with real function redefinition
466 errors. I would argue that the front end should never present us with
467 such a case, but don't enforce that for now. */
257eb6e3 468 gcc_assert (!node->process);
d71cc23f
JH
469
470 /* Reset our data structures so we can analyze the function again. */
471 memset (&node->local, 0, sizeof (node->local));
472 memset (&node->global, 0, sizeof (node->global));
473 memset (&node->rtl, 0, sizeof (node->rtl));
474 node->analyzed = false;
475 node->local.redefined_extern_inline = true;
476 node->local.finalized = false;
477
d71cc23f
JH
478 cgraph_node_remove_callees (node);
479
480 /* We may need to re-queue the node for assembling in case
b86b3ea3
RG
481 we already proceeded it and ignored as not needed or got
482 a re-declaration in IMA mode. */
483 if (node->reachable)
d71cc23f
JH
484 {
485 struct cgraph_node *n;
486
487 for (n = cgraph_nodes_queue; n; n = n->next_needed)
488 if (n == node)
489 break;
490 if (!n)
491 node->reachable = 0;
492 }
493}
d853a20e 494
953ff289
DN
495static void
496cgraph_lower_function (struct cgraph_node *node)
497{
498 if (node->lowered)
499 return;
a406865a
RG
500
501 if (node->nested)
502 lower_nested_functions (node->decl);
503 gcc_assert (!node->nested);
504
953ff289
DN
505 tree_lowering_passes (node->decl);
506 node->lowered = true;
507}
508
6b00c969
RH
509/* DECL has been parsed. Take it, queue it, compile it at the whim of the
510 logic in effect. If NESTED is true, then our caller cannot stand to have
511 the garbage collector run at the moment. We would need to either create
512 a new GC context, or just not compile right now. */
1c4a429a
JH
513
514void
6b00c969 515cgraph_finalize_function (tree decl, bool nested)
1c4a429a
JH
516{
517 struct cgraph_node *node = cgraph_node (decl);
518
d853a20e 519 if (node->local.finalized)
d71cc23f 520 cgraph_reset_node (node);
6b00c969 521
6bad2617 522 node->pid = cgraph_max_pid ++;
d853a20e 523 notice_global_symbol (decl);
f6981e16 524 node->local.finalized = true;
e21aff8a 525 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
d88e5c37 526 node->finalized_by_frontend = true;
7be82279 527 record_cdtor_fn (node->decl);
1c4a429a 528
d7f09764 529 if (cgraph_decide_is_function_needed (node, decl))
8dafba3c
RH
530 cgraph_mark_needed_node (node);
531
ff5c4582 532 /* Since we reclaim unreachable nodes at the end of every language
e7d6beb0
JH
533 level unit, we need to be conservative about possible entry points
534 there. */
ce91e74c 535 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
e7d6beb0
JH
536 cgraph_mark_reachable_node (node);
537
8dafba3c 538 /* If we've not yet emitted decl, tell the debug info about it. */
6b00c969 539 if (!TREE_ASM_WRITTEN (decl))
8dafba3c 540 (*debug_hooks->deferred_inline_function) (decl);
d173e685 541
902edd36
JH
542 /* Possibly warn about unused parameters. */
543 if (warn_unused_parameter)
544 do_warn_unused_parameter (decl);
7e8b322a
JH
545
546 if (!nested)
547 ggc_collect ();
1c4a429a
JH
548}
549
f0c882ab
JH
550/* C99 extern inline keywords allow changing of declaration after function
551 has been finalized. We need to re-decide if we want to mark the function as
552 needed then. */
553
554void
555cgraph_mark_if_needed (tree decl)
556{
557 struct cgraph_node *node = cgraph_node (decl);
d7f09764 558 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
f0c882ab
JH
559 cgraph_mark_needed_node (node);
560}
561
753d358d 562#ifdef ENABLE_CHECKING
9187e02d
JH
563/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
564static bool
565clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
566{
567 while (node != node2 && node2)
568 node2 = node2->clone_of;
569 return node2 != NULL;
570}
753d358d 571#endif
9187e02d 572
18c6ada9 573/* Verify cgraph nodes of given cgraph node. */
24e47c76 574DEBUG_FUNCTION void
18c6ada9
JH
575verify_cgraph_node (struct cgraph_node *node)
576{
577 struct cgraph_edge *e;
e21aff8a 578 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
2bafad93 579 struct function *saved_cfun = cfun;
e21aff8a 580 basic_block this_block;
726a989a 581 gimple_stmt_iterator gsi;
e0704a46 582 bool error_found = false;
18c6ada9 583
1da2ed5f 584 if (seen_error ())
5771bd91
RG
585 return;
586
18c6ada9 587 timevar_push (TV_CGRAPH_VERIFY);
2bafad93
JJ
588 /* debug_generic_stmt needs correct cfun */
589 set_cfun (this_cfun);
18c6ada9
JH
590 for (e = node->callees; e; e = e->next_callee)
591 if (e->aux)
592 {
ab532386 593 error ("aux field set for edge %s->%s",
4f1e4960
JM
594 identifier_to_locale (cgraph_node_name (e->caller)),
595 identifier_to_locale (cgraph_node_name (e->callee)));
18c6ada9
JH
596 error_found = true;
597 }
06191a23
JH
598 if (node->count < 0)
599 {
600 error ("Execution count is negative");
601 error_found = true;
602 }
b20996ff
JH
603 if (node->global.inlined_to && node->local.externally_visible)
604 {
605 error ("Externally visible inline clone");
606 error_found = true;
607 }
608 if (node->global.inlined_to && node->address_taken)
609 {
610 error ("Inline clone with address taken");
611 error_found = true;
612 }
613 if (node->global.inlined_to && node->needed)
614 {
615 error ("Inline clone is needed");
616 error_found = true;
617 }
e33c6cd6
MJ
618 for (e = node->indirect_calls; e; e = e->next_callee)
619 {
620 if (e->aux)
621 {
622 error ("aux field set for indirect edge from %s",
623 identifier_to_locale (cgraph_node_name (e->caller)));
624 error_found = true;
625 }
626 if (!e->indirect_unknown_callee
627 || !e->indirect_info)
628 {
629 error ("An indirect edge from %s is not marked as indirect or has "
630 "associated indirect_info, the corresponding statement is: ",
631 identifier_to_locale (cgraph_node_name (e->caller)));
632 debug_gimple_stmt (e->call_stmt);
633 error_found = true;
634 }
635 }
18c6ada9
JH
636 for (e = node->callers; e; e = e->next_caller)
637 {
06191a23
JH
638 if (e->count < 0)
639 {
640 error ("caller edge count is negative");
641 error_found = true;
642 }
45a80bb9
JH
643 if (e->frequency < 0)
644 {
645 error ("caller edge frequency is negative");
646 error_found = true;
647 }
648 if (e->frequency > CGRAPH_FREQ_MAX)
649 {
650 error ("caller edge frequency is too large");
651 error_found = true;
652 }
f8754107
JH
653 if (gimple_has_body_p (e->caller->decl)
654 && !e->caller->global.inlined_to
655 && (e->frequency
656 != compute_call_stmt_bb_frequency (e->caller->decl,
657 gimple_bb (e->call_stmt))))
658 {
659 error ("caller edge frequency %i does not match BB freqency %i",
660 e->frequency,
661 compute_call_stmt_bb_frequency (e->caller->decl,
662 gimple_bb (e->call_stmt)));
663 error_found = true;
664 }
18c6ada9
JH
665 if (!e->inline_failed)
666 {
667 if (node->global.inlined_to
668 != (e->caller->global.inlined_to
669 ? e->caller->global.inlined_to : e->caller))
670 {
ab532386 671 error ("inlined_to pointer is wrong");
18c6ada9
JH
672 error_found = true;
673 }
674 if (node->callers->next_caller)
675 {
ab532386 676 error ("multiple inline callers");
18c6ada9
JH
677 error_found = true;
678 }
679 }
680 else
681 if (node->global.inlined_to)
682 {
ab532386 683 error ("inlined_to pointer set for noninline callers");
18c6ada9
JH
684 error_found = true;
685 }
686 }
687 if (!node->callers && node->global.inlined_to)
688 {
95a52ebb 689 error ("inlined_to pointer is set but no predecessors found");
18c6ada9
JH
690 error_found = true;
691 }
692 if (node->global.inlined_to == node)
693 {
ab532386 694 error ("inlined_to pointer refers to itself");
18c6ada9
JH
695 error_found = true;
696 }
697
69fb1284 698 if (!cgraph_node (node->decl))
18c6ada9 699 {
69fb1284 700 error ("node not found in cgraph_hash");
18c6ada9
JH
701 error_found = true;
702 }
c22cacf3 703
9187e02d
JH
704 if (node->clone_of)
705 {
706 struct cgraph_node *n;
707 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
708 if (n == node)
709 break;
710 if (!n)
711 {
712 error ("node has wrong clone_of");
713 error_found = true;
714 }
715 }
716 if (node->clones)
717 {
718 struct cgraph_node *n;
719 for (n = node->clones; n; n = n->next_sibling_clone)
720 if (n->clone_of != node)
721 break;
722 if (n)
723 {
724 error ("node has wrong clone list");
725 error_found = true;
726 }
727 }
728 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
729 {
730 error ("node is in clone list but it is not clone");
731 error_found = true;
732 }
733 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
734 {
735 error ("node has wrong prev_clone pointer");
736 error_found = true;
737 }
738 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
739 {
740 error ("double linked list of clones corrupted");
741 error_found = true;
742 }
78eaf7bf
MJ
743 if (node->same_comdat_group)
744 {
745 struct cgraph_node *n = node->same_comdat_group;
746
747 if (!DECL_ONE_ONLY (node->decl))
748 {
749 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
750 error_found = true;
751 }
752 if (n == node)
753 {
754 error ("node is alone in a comdat group");
755 error_found = true;
756 }
757 do
758 {
759 if (!n->same_comdat_group)
760 {
761 error ("same_comdat_group is not a circular list");
762 error_found = true;
763 break;
764 }
765 n = n->same_comdat_group;
766 }
767 while (n != node);
768 }
9187e02d
JH
769
770 if (node->analyzed && gimple_has_body_p (node->decl)
726a989a 771 && !TREE_ASM_WRITTEN (node->decl)
d7f09764
DN
772 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
773 && !flag_wpa)
18c6ada9 774 {
e21aff8a
SB
775 if (this_cfun->cfg)
776 {
777 /* The nodes we're interested in are never shared, so walk
778 the tree ignoring duplicates. */
2dee695b 779 struct pointer_set_t *visited_nodes = pointer_set_create ();
e21aff8a
SB
780 /* Reach the trees by walking over the CFG, and note the
781 enclosing basic-blocks in the call edges. */
782 FOR_EACH_BB_FN (this_block, this_cfun)
726a989a
RB
783 for (gsi = gsi_start_bb (this_block);
784 !gsi_end_p (gsi);
785 gsi_next (&gsi))
e0704a46 786 {
726a989a 787 gimple stmt = gsi_stmt (gsi);
e33c6cd6 788 if (is_gimple_call (stmt))
e0704a46
JH
789 {
790 struct cgraph_edge *e = cgraph_edge (node, stmt);
e33c6cd6 791 tree decl = gimple_call_fndecl (stmt);
e0704a46
JH
792 if (e)
793 {
794 if (e->aux)
795 {
ab532386 796 error ("shared call_stmt:");
726a989a 797 debug_gimple_stmt (stmt);
e0704a46
JH
798 error_found = true;
799 }
e33c6cd6 800 if (!e->indirect_unknown_callee)
6744a6ab 801 {
e33c6cd6
MJ
802 if (e->callee->same_body_alias)
803 {
804 error ("edge points to same body alias:");
805 debug_tree (e->callee->decl);
806 error_found = true;
807 }
e466e2ce
JH
808#ifdef ENABLE_CHECKING
809 else if (!e->callee->global.inlined_to
e33c6cd6 810 && decl
e466e2ce
JH
811 && cgraph_get_node (decl)
812 && (e->callee->former_clone_of
813 != cgraph_get_node (decl)->decl)
e33c6cd6
MJ
814 && !clone_of_p (cgraph_node (decl),
815 e->callee))
816 {
817 error ("edge points to wrong declaration:");
818 debug_tree (e->callee->decl);
819 fprintf (stderr," Instead of:");
820 debug_tree (decl);
821 error_found = true;
822 }
e466e2ce 823#endif
6744a6ab 824 }
e33c6cd6 825 else if (decl)
e0704a46 826 {
e33c6cd6
MJ
827 error ("an indirect edge with unknown callee "
828 "corresponding to a call_stmt with "
829 "a known declaration:");
47cb0d7d 830 error_found = true;
e33c6cd6 831 debug_gimple_stmt (e->call_stmt);
e0704a46
JH
832 }
833 e->aux = (void *)1;
834 }
e33c6cd6 835 else if (decl)
e0704a46 836 {
ab532386 837 error ("missing callgraph edge for call stmt:");
726a989a 838 debug_gimple_stmt (stmt);
e0704a46
JH
839 error_found = true;
840 }
841 }
842 }
e21aff8a 843 pointer_set_destroy (visited_nodes);
e21aff8a
SB
844 }
845 else
846 /* No CFG available?! */
847 gcc_unreachable ();
848
18c6ada9
JH
849 for (e = node->callees; e; e = e->next_callee)
850 {
e33c6cd6 851 if (!e->aux)
18c6ada9 852 {
ab532386 853 error ("edge %s->%s has no corresponding call_stmt",
4f1e4960
JM
854 identifier_to_locale (cgraph_node_name (e->caller)),
855 identifier_to_locale (cgraph_node_name (e->callee)));
726a989a 856 debug_gimple_stmt (e->call_stmt);
18c6ada9
JH
857 error_found = true;
858 }
859 e->aux = 0;
860 }
e33c6cd6
MJ
861 for (e = node->indirect_calls; e; e = e->next_callee)
862 {
863 if (!e->aux)
864 {
865 error ("an indirect edge from %s has no corresponding call_stmt",
866 identifier_to_locale (cgraph_node_name (e->caller)));
867 debug_gimple_stmt (e->call_stmt);
868 error_found = true;
869 }
870 e->aux = 0;
871 }
18c6ada9
JH
872 }
873 if (error_found)
874 {
875 dump_cgraph_node (stderr, node);
ab532386 876 internal_error ("verify_cgraph_node failed");
18c6ada9 877 }
2bafad93 878 set_cfun (saved_cfun);
18c6ada9
JH
879 timevar_pop (TV_CGRAPH_VERIFY);
880}
881
882/* Verify whole cgraph structure. */
24e47c76 883DEBUG_FUNCTION void
18c6ada9
JH
884verify_cgraph (void)
885{
886 struct cgraph_node *node;
887
1da2ed5f 888 if (seen_error ())
89480522
JH
889 return;
890
18c6ada9
JH
891 for (node = cgraph_nodes; node; node = node->next)
892 verify_cgraph_node (node);
893}
894
474eccc6
ILT
895/* Output all asm statements we have stored up to be output. */
896
897static void
898cgraph_output_pending_asms (void)
899{
900 struct cgraph_asm_node *can;
901
1da2ed5f 902 if (seen_error ())
474eccc6
ILT
903 return;
904
905 for (can = cgraph_asm_nodes; can; can = can->next)
906 assemble_asm (can->asm_str);
907 cgraph_asm_nodes = NULL;
908}
909
e767b5be 910/* Analyze the function scheduled to be output. */
a406865a 911static void
e767b5be
JH
912cgraph_analyze_function (struct cgraph_node *node)
913{
a406865a 914 tree save = current_function_decl;
e767b5be
JH
915 tree decl = node->decl;
916
25c84396 917 current_function_decl = decl;
e21aff8a 918 push_cfun (DECL_STRUCT_FUNCTION (decl));
a406865a 919
0e0a1359
MJ
920 assign_assembler_name_if_neeeded (node->decl);
921
a406865a
RG
922 /* Make sure to gimplify bodies only once. During analyzing a
923 function we lower it, which will require gimplified nested
924 functions, so we can end up here with an already gimplified
925 body. */
926 if (!gimple_body (decl))
927 gimplify_function_tree (decl);
928 dump_function (TDI_generic, decl);
929
e21aff8a 930 cgraph_lower_function (node);
6a84c098 931 node->analyzed = true;
e767b5be 932
e21aff8a 933 pop_cfun ();
a406865a 934 current_function_decl = save;
e767b5be
JH
935}
936
386b46cf
JH
937/* Look for externally_visible and used attributes and mark cgraph nodes
938 accordingly.
939
940 We cannot mark the nodes at the point the attributes are processed (in
941 handle_*_attribute) because the copy of the declarations available at that
942 point may not be canonical. For example, in:
943
944 void f();
945 void f() __attribute__((used));
946
947 the declaration we see in handle_used_attribute will be the second
948 declaration -- but the front end will subsequently merge that declaration
949 with the original declaration and discard the second declaration.
950
951 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
952
953 void f() {}
954 void f() __attribute__((externally_visible));
955
956 is valid.
957
958 So, we walk the nodes at the end of the translation unit, applying the
959 attributes at that point. */
960
961static void
962process_function_and_variable_attributes (struct cgraph_node *first,
8a4a83ed 963 struct varpool_node *first_var)
386b46cf
JH
964{
965 struct cgraph_node *node;
8a4a83ed 966 struct varpool_node *vnode;
386b46cf
JH
967
968 for (node = cgraph_nodes; node != first; node = node->next)
969 {
970 tree decl = node->decl;
b42186f1 971 if (DECL_PRESERVE_P (decl))
152464d2 972 cgraph_mark_needed_node (node);
386b46cf
JH
973 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
974 {
343d4b27 975 if (! TREE_PUBLIC (node->decl))
c5d75364
MLI
976 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
977 "%<externally_visible%>"
978 " attribute have effect only on public objects");
b20996ff
JH
979 else if (node->local.finalized)
980 cgraph_mark_needed_node (node);
386b46cf
JH
981 }
982 }
8a4a83ed 983 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
386b46cf
JH
984 {
985 tree decl = vnode->decl;
b42186f1 986 if (DECL_PRESERVE_P (decl))
386b46cf 987 {
a8289259 988 vnode->force_output = true;
386b46cf 989 if (vnode->finalized)
8a4a83ed 990 varpool_mark_needed_node (vnode);
386b46cf
JH
991 }
992 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
993 {
343d4b27 994 if (! TREE_PUBLIC (vnode->decl))
c5d75364
MLI
995 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
996 "%<externally_visible%>"
997 " attribute have effect only on public objects");
b20996ff
JH
998 else if (vnode->finalized)
999 varpool_mark_needed_node (vnode);
386b46cf
JH
1000 }
1001 }
1002}
1003
151e6f24
JH
1004/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1005 each reachable functions) and build cgraph.
1006 The function can be called multiple times after inserting new nodes
88512ba0 1007 into beginning of queue. Just the new part of queue is re-scanned then. */
1c4a429a 1008
151e6f24
JH
1009static void
1010cgraph_analyze_functions (void)
1c4a429a 1011{
cd9c7bd2 1012 /* Keep track of already processed nodes when called multiple times for
aabcd309 1013 intermodule optimization. */
cd9c7bd2 1014 static struct cgraph_node *first_analyzed;
61e00a5e 1015 struct cgraph_node *first_processed = first_analyzed;
8a4a83ed 1016 static struct varpool_node *first_analyzed_var;
151e6f24 1017 struct cgraph_node *node, *next;
1c4a429a 1018
61e00a5e
JH
1019 process_function_and_variable_attributes (first_processed,
1020 first_analyzed_var);
1021 first_processed = cgraph_nodes;
8a4a83ed
JH
1022 first_analyzed_var = varpool_nodes;
1023 varpool_analyze_pending_decls ();
a194aa56 1024 if (cgraph_dump_file)
1c4a429a 1025 {
7d82fe7c 1026 fprintf (cgraph_dump_file, "Initial entry points:");
cd9c7bd2 1027 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
39ecc018 1028 if (node->needed)
a194aa56
JH
1029 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1030 fprintf (cgraph_dump_file, "\n");
1c4a429a 1031 }
151e6f24 1032 cgraph_process_new_functions ();
1c4a429a 1033
7660e67e
SB
1034 /* Propagate reachability flag and lower representation of all reachable
1035 functions. In the future, lowering will introduce new functions and
1036 new entry points on the way (by template instantiation and virtual
1037 method table generation for instance). */
1668aabc 1038 while (cgraph_nodes_queue)
1c4a429a 1039 {
e767b5be 1040 struct cgraph_edge *edge;
1668aabc
JH
1041 tree decl = cgraph_nodes_queue->decl;
1042
1043 node = cgraph_nodes_queue;
8bd87c4e 1044 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
18c6ada9 1045 node->next_needed = NULL;
1c4a429a 1046
cd4dea62 1047 /* ??? It is possible to create extern inline function and later using
9d203871 1048 weak alias attribute to kill its body. See
cd4dea62 1049 gcc.c-torture/compile/20011119-1.c */
726a989a 1050 if (!DECL_STRUCT_FUNCTION (decl))
d71cc23f
JH
1051 {
1052 cgraph_reset_node (node);
1053 continue;
1054 }
cd4dea62 1055
d7f09764
DN
1056 if (!node->analyzed)
1057 cgraph_analyze_function (node);
8dafba3c 1058
1c4a429a 1059 for (edge = node->callees; edge; edge = edge->next_callee)
e767b5be 1060 if (!edge->callee->reachable)
8dafba3c
RH
1061 cgraph_mark_reachable_node (edge->callee);
1062
b66887e4
JJ
1063 if (node->same_comdat_group)
1064 {
1065 for (next = node->same_comdat_group;
1066 next != node;
1067 next = next->same_comdat_group)
1068 cgraph_mark_reachable_node (next);
1069 }
1070
6b20f353
DS
1071 /* If decl is a clone of an abstract function, mark that abstract
1072 function so that we don't release its body. The DECL_INITIAL() of that
1073 abstract function declaration will be later needed to output debug info. */
1074 if (DECL_ABSTRACT_ORIGIN (decl))
1075 {
1076 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1077 origin_node->abstract_and_needed = true;
1078 }
1079
61e00a5e
JH
1080 /* We finalize local static variables during constructing callgraph
1081 edges. Process their attributes too. */
1082 process_function_and_variable_attributes (first_processed,
1083 first_analyzed_var);
1084 first_processed = cgraph_nodes;
8a4a83ed
JH
1085 first_analyzed_var = varpool_nodes;
1086 varpool_analyze_pending_decls ();
151e6f24 1087 cgraph_process_new_functions ();
1c4a429a 1088 }
8dafba3c 1089
564738df 1090 /* Collect entry points to the unit. */
a194aa56 1091 if (cgraph_dump_file)
1668aabc 1092 {
7d82fe7c 1093 fprintf (cgraph_dump_file, "Unit entry points:");
cd9c7bd2 1094 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
39ecc018 1095 if (node->needed)
a194aa56 1096 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
7d82fe7c 1097 fprintf (cgraph_dump_file, "\n\nInitial ");
e767b5be 1098 dump_cgraph (cgraph_dump_file);
1668aabc 1099 }
7660e67e 1100
a194aa56
JH
1101 if (cgraph_dump_file)
1102 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1c4a429a 1103
96fc428c 1104 for (node = cgraph_nodes; node != first_analyzed; node = next)
1c4a429a
JH
1105 {
1106 tree decl = node->decl;
96fc428c 1107 next = node->next;
1c4a429a 1108
39ecc018 1109 if (node->local.finalized && !gimple_has_body_p (decl))
c22cacf3 1110 cgraph_reset_node (node);
d71cc23f 1111
39ecc018 1112 if (!node->reachable && gimple_has_body_p (decl))
1c4a429a 1113 {
a194aa56
JH
1114 if (cgraph_dump_file)
1115 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
18c6ada9 1116 cgraph_remove_node (node);
d71cc23f 1117 continue;
1c4a429a 1118 }
9b0436b7
JH
1119 else
1120 node->next_needed = NULL;
39ecc018 1121 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
d71cc23f 1122 gcc_assert (node->analyzed == node->local.finalized);
1c4a429a 1123 }
a194aa56 1124 if (cgraph_dump_file)
7d82fe7c
KC
1125 {
1126 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1127 dump_cgraph (cgraph_dump_file);
1128 }
cd9c7bd2 1129 first_analyzed = cgraph_nodes;
1c4a429a 1130 ggc_collect ();
151e6f24
JH
1131}
1132
5f1a9ebb 1133
151e6f24
JH
1134/* Analyze the whole compilation unit once it is parsed completely. */
1135
1136void
1137cgraph_finalize_compilation_unit (void)
1138{
90097c67
RG
1139 timevar_push (TV_CGRAPH);
1140
a406865a
RG
1141 /* Do not skip analyzing the functions if there were errors, we
1142 miss diagnostics for following functions otherwise. */
151e6f24 1143
5f1a9ebb 1144 /* Emit size functions we didn't inline. */
f82a627c 1145 finalize_size_functions ();
5f1a9ebb 1146
5f1a9ebb
RG
1147 /* Call functions declared with the "constructor" or "destructor"
1148 attribute. */
1149 cgraph_build_cdtor_fns ();
151e6f24 1150
90097c67
RG
1151 /* Mark alias targets necessary and emit diagnostics. */
1152 finish_aliases_1 ();
1153
151e6f24
JH
1154 if (!quiet_flag)
1155 {
1156 fprintf (stderr, "\nAnalyzing compilation unit\n");
1157 fflush (stderr);
1158 }
1159
90097c67
RG
1160 /* Gimplify and lower all functions, compute reachability and
1161 remove unreachable nodes. */
1162 cgraph_analyze_functions ();
1163
5f1a9ebb
RG
1164 /* Mark alias targets necessary and emit diagnostics. */
1165 finish_aliases_1 ();
1166
90097c67 1167 /* Gimplify and lower thunks. */
151e6f24 1168 cgraph_analyze_functions ();
a406865a 1169
90097c67 1170 /* Finally drive the pass manager. */
a406865a 1171 cgraph_optimize ();
90097c67
RG
1172
1173 timevar_pop (TV_CGRAPH);
1c4a429a 1174}
3baf459d
DN
1175
1176
1c4a429a
JH
1177/* Figure out what functions we want to assemble. */
1178
1179static void
db0e878d 1180cgraph_mark_functions_to_output (void)
1c4a429a
JH
1181{
1182 struct cgraph_node *node;
b66887e4
JJ
1183#ifdef ENABLE_CHECKING
1184 bool check_same_comdat_groups = false;
1185
1186 for (node = cgraph_nodes; node; node = node->next)
1187 gcc_assert (!node->process);
1188#endif
1c4a429a 1189
1c4a429a
JH
1190 for (node = cgraph_nodes; node; node = node->next)
1191 {
1192 tree decl = node->decl;
b58b1157 1193 struct cgraph_edge *e;
c22cacf3 1194
b66887e4
JJ
1195 gcc_assert (!node->process || node->same_comdat_group);
1196 if (node->process)
1197 continue;
b58b1157
JH
1198
1199 for (e = node->callers; e; e = e->next_caller)
dc0bfe6a 1200 if (e->inline_failed)
b58b1157 1201 break;
1c4a429a 1202
7660e67e
SB
1203 /* We need to output all local functions that are used and not
1204 always inlined, as well as those that are reachable from
1205 outside the current compilation unit. */
39ecc018 1206 if (node->analyzed
18c6ada9 1207 && !node->global.inlined_to
a837268b 1208 && (node->needed || node->reachable_from_other_partition
bd3cdcc0 1209 || node->address_taken
b58b1157 1210 || (e && node->reachable))
6de9cd9a 1211 && !TREE_ASM_WRITTEN (decl)
1c4a429a 1212 && !DECL_EXTERNAL (decl))
b66887e4
JJ
1213 {
1214 node->process = 1;
1215 if (node->same_comdat_group)
1216 {
1217 struct cgraph_node *next;
1218 for (next = node->same_comdat_group;
1219 next != node;
1220 next = next->same_comdat_group)
1221 next->process = 1;
1222 }
1223 }
1224 else if (node->same_comdat_group)
1225 {
1226#ifdef ENABLE_CHECKING
1227 check_same_comdat_groups = true;
1228#endif
1229 }
341c100f 1230 else
1a2caa7a
NS
1231 {
1232 /* We should've reclaimed all functions that are not needed. */
1233#ifdef ENABLE_CHECKING
726a989a 1234 if (!node->global.inlined_to
39ecc018 1235 && gimple_has_body_p (decl)
a837268b
JH
1236 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1237 are inside partition, we can end up not removing the body since we no longer
1238 have analyzed node pointing to it. */
1239 && !node->in_other_partition
1a2caa7a
NS
1240 && !DECL_EXTERNAL (decl))
1241 {
1242 dump_cgraph_node (stderr, node);
1243 internal_error ("failed to reclaim unneeded function");
1244 }
1245#endif
726a989a 1246 gcc_assert (node->global.inlined_to
39ecc018 1247 || !gimple_has_body_p (decl)
a837268b 1248 || node->in_other_partition
1a2caa7a
NS
1249 || DECL_EXTERNAL (decl));
1250
1251 }
c22cacf3 1252
18d13f34 1253 }
b66887e4
JJ
1254#ifdef ENABLE_CHECKING
1255 if (check_same_comdat_groups)
1256 for (node = cgraph_nodes; node; node = node->next)
1257 if (node->same_comdat_group && !node->process)
1258 {
1259 tree decl = node->decl;
1260 if (!node->global.inlined_to
1261 && gimple_has_body_p (decl)
a837268b
JH
1262 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1263 are inside partition, we can end up not removing the body since we no longer
1264 have analyzed node pointing to it. */
1265 && !node->in_other_partition
b66887e4
JJ
1266 && !DECL_EXTERNAL (decl))
1267 {
1268 dump_cgraph_node (stderr, node);
1269 internal_error ("failed to reclaim unneeded function");
1270 }
1271 }
1272#endif
18d13f34
JH
1273}
1274
6744a6ab
JH
1275/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1276 in lowered gimple form.
1277
1278 Set current_function_decl and cfun to newly constructed empty function body.
1279 return basic block in the function body. */
1280
1281static basic_block
1282init_lowered_empty_function (tree decl)
1283{
1284 basic_block bb;
1285
1286 current_function_decl = decl;
1287 allocate_struct_function (decl, false);
1288 gimple_register_cfg_hooks ();
1289 init_empty_tree_cfg ();
1290 init_tree_ssa (cfun);
1291 init_ssa_operands ();
1292 cfun->gimple_df->in_ssa_p = true;
1293 DECL_INITIAL (decl) = make_node (BLOCK);
1294
1295 DECL_SAVED_TREE (decl) = error_mark_node;
1296 cfun->curr_properties |=
1297 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1298 PROP_ssa);
1299
1300 /* Create BB for body of the function and connect it properly. */
1301 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1302 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1303 make_edge (bb, EXIT_BLOCK_PTR, 0);
1304
1305 return bb;
1306}
1307
1308/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1309 offset indicated by VIRTUAL_OFFSET, if that is
1310 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1311 zero for a result adjusting thunk. */
1312
1313static tree
1314thunk_adjust (gimple_stmt_iterator * bsi,
1315 tree ptr, bool this_adjusting,
1316 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1317{
1318 gimple stmt;
1319 tree ret;
1320
313333a6
RG
1321 if (this_adjusting
1322 && fixed_offset != 0)
6744a6ab
JH
1323 {
1324 stmt = gimple_build_assign (ptr,
1325 fold_build2_loc (input_location,
1326 POINTER_PLUS_EXPR,
1327 TREE_TYPE (ptr), ptr,
1328 size_int (fixed_offset)));
1329 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1330 }
1331
1332 /* If there's a virtual offset, look up that value in the vtable and
1333 adjust the pointer again. */
1334 if (virtual_offset)
1335 {
1336 tree vtabletmp;
1337 tree vtabletmp2;
1338 tree vtabletmp3;
1339 tree offsettmp;
1340
1341 if (!vtable_entry_type)
1342 {
1343 tree vfunc_type = make_node (FUNCTION_TYPE);
1344 TREE_TYPE (vfunc_type) = integer_type_node;
1345 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1346 layout_type (vfunc_type);
1347
1348 vtable_entry_type = build_pointer_type (vfunc_type);
1349 }
1350
1351 vtabletmp =
1352 create_tmp_var (build_pointer_type
1353 (build_pointer_type (vtable_entry_type)), "vptr");
1354
1355 /* The vptr is always at offset zero in the object. */
1356 stmt = gimple_build_assign (vtabletmp,
1357 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1358 ptr));
1359 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1360 mark_symbols_for_renaming (stmt);
1361 find_referenced_vars_in (stmt);
1362
1363 /* Form the vtable address. */
1364 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1365 "vtableaddr");
1366 stmt = gimple_build_assign (vtabletmp2,
70f34814 1367 build_simple_mem_ref (vtabletmp));
6744a6ab
JH
1368 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1369 mark_symbols_for_renaming (stmt);
1370 find_referenced_vars_in (stmt);
1371
1372 /* Find the entry with the vcall offset. */
1373 stmt = gimple_build_assign (vtabletmp2,
1374 fold_build2_loc (input_location,
1375 POINTER_PLUS_EXPR,
1376 TREE_TYPE (vtabletmp2),
1377 vtabletmp2,
1378 fold_convert (sizetype,
1379 virtual_offset)));
1380 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1381
1382 /* Get the offset itself. */
1383 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1384 "vcalloffset");
1385 stmt = gimple_build_assign (vtabletmp3,
70f34814 1386 build_simple_mem_ref (vtabletmp2));
6744a6ab
JH
1387 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1388 mark_symbols_for_renaming (stmt);
1389 find_referenced_vars_in (stmt);
1390
1391 /* Cast to sizetype. */
1392 offsettmp = create_tmp_var (sizetype, "offset");
1393 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1394 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1395 mark_symbols_for_renaming (stmt);
1396 find_referenced_vars_in (stmt);
1397
1398 /* Adjust the `this' pointer. */
1399 ptr = fold_build2_loc (input_location,
1400 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1401 offsettmp);
1402 }
1403
313333a6
RG
1404 if (!this_adjusting
1405 && fixed_offset != 0)
6744a6ab
JH
1406 /* Adjust the pointer by the constant. */
1407 {
1408 tree ptrtmp;
1409
1410 if (TREE_CODE (ptr) == VAR_DECL)
1411 ptrtmp = ptr;
1412 else
1413 {
1414 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1415 stmt = gimple_build_assign (ptrtmp, ptr);
1416 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1417 mark_symbols_for_renaming (stmt);
1418 find_referenced_vars_in (stmt);
1419 }
1420 ptr = fold_build2_loc (input_location,
1421 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1422 size_int (fixed_offset));
1423 }
1424
1425 /* Emit the statement and gimplify the adjustment expression. */
1426 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1427 stmt = gimple_build_assign (ret, ptr);
1428 mark_symbols_for_renaming (stmt);
1429 find_referenced_vars_in (stmt);
1430 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1431
1432 return ret;
1433}
1434
1435/* Produce assembler for thunk NODE. */
1436
1437static void
1438assemble_thunk (struct cgraph_node *node)
1439{
1440 bool this_adjusting = node->thunk.this_adjusting;
1441 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1442 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1443 tree virtual_offset = NULL;
1444 tree alias = node->thunk.alias;
1445 tree thunk_fndecl = node->decl;
1446 tree a = DECL_ARGUMENTS (thunk_fndecl);
1447
1448 current_function_decl = thunk_fndecl;
1449
1450 if (this_adjusting
1451 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1452 virtual_value, alias))
1453 {
1454 const char *fnname;
1455 tree fn_block;
1456
1457 DECL_RESULT (thunk_fndecl)
1458 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1459 RESULT_DECL, 0, integer_type_node);
15488554 1460 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
6744a6ab
JH
1461
1462 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1463 create one. */
1464 fn_block = make_node (BLOCK);
1465 BLOCK_VARS (fn_block) = a;
1466 DECL_INITIAL (thunk_fndecl) = fn_block;
1467 init_function_start (thunk_fndecl);
1468 cfun->is_thunk = 1;
1469 assemble_start_function (thunk_fndecl, fnname);
1470
1471 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1472 fixed_offset, virtual_value, alias);
1473
1474 assemble_end_function (thunk_fndecl, fnname);
1475 init_insn_lengths ();
1476 free_after_compilation (cfun);
1477 set_cfun (NULL);
1478 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1479 }
1480 else
1481 {
1482 tree restype;
1483 basic_block bb, then_bb, else_bb, return_bb;
1484 gimple_stmt_iterator bsi;
1485 int nargs = 0;
1486 tree arg;
1487 int i;
1488 tree resdecl;
1489 tree restmp = NULL;
1490 VEC(tree, heap) *vargs;
1491
1492 gimple call;
1493 gimple ret;
1494
1495 DECL_IGNORED_P (thunk_fndecl) = 1;
1496 bitmap_obstack_initialize (NULL);
1497
1498 if (node->thunk.virtual_offset_p)
1499 virtual_offset = size_int (virtual_value);
1500
1501 /* Build the return declaration for the function. */
1502 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1503 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1504 {
1505 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1506 DECL_ARTIFICIAL (resdecl) = 1;
1507 DECL_IGNORED_P (resdecl) = 1;
1508 DECL_RESULT (thunk_fndecl) = resdecl;
1509 }
1510 else
1511 resdecl = DECL_RESULT (thunk_fndecl);
1512
1513 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1514
1515 bsi = gsi_start_bb (bb);
1516
1517 /* Build call to the function being thunked. */
1518 if (!VOID_TYPE_P (restype))
1519 {
1520 if (!is_gimple_reg_type (restype))
1521 {
1522 restmp = resdecl;
1523 cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
1524 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1525 }
1526 else
1527 restmp = create_tmp_var_raw (restype, "retval");
1528 }
1529
1530 for (arg = a; arg; arg = TREE_CHAIN (arg))
1531 nargs++;
1532 vargs = VEC_alloc (tree, heap, nargs);
1533 if (this_adjusting)
1534 VEC_quick_push (tree, vargs,
1535 thunk_adjust (&bsi,
1536 a, 1, fixed_offset,
1537 virtual_offset));
1538 else
1539 VEC_quick_push (tree, vargs, a);
1540 for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
1541 VEC_quick_push (tree, vargs, arg);
1542 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1543 VEC_free (tree, heap, vargs);
1544 gimple_call_set_cannot_inline (call, true);
1545 gimple_call_set_from_thunk (call, true);
1546 if (restmp)
1547 gimple_call_set_lhs (call, restmp);
1548 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1549 mark_symbols_for_renaming (call);
1550 find_referenced_vars_in (call);
1551 update_stmt (call);
1552
1553 if (restmp && !this_adjusting)
1554 {
1124098b 1555 tree true_label = NULL_TREE;
6744a6ab
JH
1556
1557 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1558 {
1559 gimple stmt;
1560 /* If the return type is a pointer, we need to
1561 protect against NULL. We know there will be an
1562 adjustment, because that's why we're emitting a
1563 thunk. */
1564 then_bb = create_basic_block (NULL, (void *) 0, bb);
1565 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1566 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1567 remove_edge (single_succ_edge (bb));
1568 true_label = gimple_block_label (then_bb);
6744a6ab
JH
1569 stmt = gimple_build_cond (NE_EXPR, restmp,
1570 fold_convert (TREE_TYPE (restmp),
1571 integer_zero_node),
1572 NULL_TREE, NULL_TREE);
1573 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1574 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1575 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1576 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1577 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1578 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1579 bsi = gsi_last_bb (then_bb);
1580 }
1581
1582 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1583 fixed_offset, virtual_offset);
1584 if (true_label)
1585 {
1586 gimple stmt;
1587 bsi = gsi_last_bb (else_bb);
1588 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1589 integer_zero_node));
1590 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1591 bsi = gsi_last_bb (return_bb);
1592 }
1593 }
1594 else
1595 gimple_call_set_tail (call, true);
1596
1597 /* Build return value. */
1598 ret = gimple_build_return (restmp);
1599 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1600
1601 delete_unreachable_blocks ();
1602 update_ssa (TODO_update_ssa);
1603
1604 cgraph_remove_same_body_alias (node);
1605 /* Since we want to emit the thunk, we explicitly mark its name as
1606 referenced. */
6744a6ab
JH
1607 cgraph_add_new_function (thunk_fndecl, true);
1608 bitmap_obstack_release (NULL);
1609 }
1610 current_function_decl = NULL;
1611}
1612
1c4a429a 1613/* Expand function specified by NODE. */
7660e67e 1614
1c4a429a 1615static void
db0e878d 1616cgraph_expand_function (struct cgraph_node *node)
1c4a429a
JH
1617{
1618 tree decl = node->decl;
1619
18c6ada9 1620 /* We ought to not compile any inline clones. */
341c100f 1621 gcc_assert (!node->global.inlined_to);
18c6ada9 1622
7e8b322a 1623 announce_function (decl);
257eb6e3 1624 node->process = 0;
18d13f34 1625
2dee695b 1626 gcc_assert (node->lowered);
776b966e 1627
a3546141 1628 /* Generate RTL for the body of DECL. */
e89d6010 1629 tree_rest_of_compilation (decl);
18d13f34 1630
6de9cd9a 1631 /* Make sure that BE didn't give up on compiling. */
f30cfcb1 1632 gcc_assert (TREE_ASM_WRITTEN (decl));
1c4a429a 1633 current_function_decl = NULL;
b2583345
JJ
1634 if (node->same_body)
1635 {
6744a6ab 1636 struct cgraph_node *alias, *next;
b2583345 1637 bool saved_alias = node->alias;
6744a6ab
JH
1638 for (alias = node->same_body;
1639 alias && alias->next; alias = alias->next)
1640 ;
1641 /* Walk aliases in the order they were created; it is possible that
1642 thunks reffers to the aliases made earlier. */
1643 for (; alias; alias = next)
1644 {
1645 next = alias->previous;
1646 if (!alias->thunk.thunk_p)
1647 assemble_alias (alias->decl,
1648 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1649 else
1650 assemble_thunk (alias);
1651 }
b2583345
JJ
1652 node->alias = saved_alias;
1653 }
39ecc018
JH
1654 gcc_assert (!cgraph_preserve_function_body_p (decl));
1655 cgraph_release_function_body (node);
1656 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1657 points to the dead function body. */
1658 cgraph_node_remove_callees (node);
6b02a499
JH
1659
1660 cgraph_function_flags_ready = true;
1c4a429a
JH
1661}
1662
18c6ada9 1663/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
b58b1157
JH
1664
1665bool
61a05df1 1666cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
b58b1157 1667{
18c6ada9
JH
1668 *reason = e->inline_failed;
1669 return !e->inline_failed;
b58b1157 1670}
18c6ada9 1671
6674a6ce 1672
6674a6ce 1673
db0e878d
AJ
1674/* Expand all functions that must be output.
1675
b58b1157
JH
1676 Attempt to topologically sort the nodes so function is output when
1677 all called functions are already assembled to allow data to be
a98ebe2e 1678 propagated across the callgraph. Use a stack to get smaller distance
d1a6adeb 1679 between a function and its callees (later we may choose to use a more
b58b1157
JH
1680 sophisticated algorithm for function reordering; we will likely want
1681 to use subsections to make the output functions appear in top-down
1682 order). */
1683
1684static void
a20af5b8 1685cgraph_expand_all_functions (void)
b58b1157
JH
1686{
1687 struct cgraph_node *node;
5ed6ace5 1688 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
f30cfcb1 1689 int order_pos, new_order_pos = 0;
b58b1157
JH
1690 int i;
1691
b58b1157 1692 order_pos = cgraph_postorder (order);
341c100f 1693 gcc_assert (order_pos == cgraph_n_nodes);
b58b1157 1694
1ae58c30 1695 /* Garbage collector may remove inline clones we eliminate during
18c6ada9
JH
1696 optimization. So we must be sure to not reference them. */
1697 for (i = 0; i < order_pos; i++)
257eb6e3 1698 if (order[i]->process)
18c6ada9
JH
1699 order[new_order_pos++] = order[i];
1700
1701 for (i = new_order_pos - 1; i >= 0; i--)
b58b1157
JH
1702 {
1703 node = order[i];
257eb6e3 1704 if (node->process)
b58b1157 1705 {
341c100f 1706 gcc_assert (node->reachable);
257eb6e3 1707 node->process = 0;
b58b1157
JH
1708 cgraph_expand_function (node);
1709 }
1710 }
f45e0ad1 1711 cgraph_process_new_functions ();
50674e96 1712
b58b1157 1713 free (order);
50674e96 1714
b58b1157
JH
1715}
1716
474eccc6
ILT
1717/* This is used to sort the node types by the cgraph order number. */
1718
24b97832
ILT
1719enum cgraph_order_sort_kind
1720{
1721 ORDER_UNDEFINED = 0,
1722 ORDER_FUNCTION,
1723 ORDER_VAR,
1724 ORDER_ASM
1725};
1726
474eccc6
ILT
1727struct cgraph_order_sort
1728{
24b97832 1729 enum cgraph_order_sort_kind kind;
474eccc6
ILT
1730 union
1731 {
1732 struct cgraph_node *f;
8a4a83ed 1733 struct varpool_node *v;
474eccc6
ILT
1734 struct cgraph_asm_node *a;
1735 } u;
1736};
1737
1738/* Output all functions, variables, and asm statements in the order
1739 according to their order fields, which is the order in which they
1740 appeared in the file. This implements -fno-toplevel-reorder. In
1741 this mode we may output functions and variables which don't really
1742 need to be output. */
1743
1744static void
1745cgraph_output_in_order (void)
1746{
1747 int max;
474eccc6
ILT
1748 struct cgraph_order_sort *nodes;
1749 int i;
1750 struct cgraph_node *pf;
8a4a83ed 1751 struct varpool_node *pv;
474eccc6
ILT
1752 struct cgraph_asm_node *pa;
1753
1754 max = cgraph_order;
33283dad 1755 nodes = XCNEWVEC (struct cgraph_order_sort, max);
474eccc6 1756
8a4a83ed 1757 varpool_analyze_pending_decls ();
474eccc6
ILT
1758
1759 for (pf = cgraph_nodes; pf; pf = pf->next)
1760 {
257eb6e3 1761 if (pf->process)
474eccc6
ILT
1762 {
1763 i = pf->order;
1764 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1765 nodes[i].kind = ORDER_FUNCTION;
1766 nodes[i].u.f = pf;
1767 }
1768 }
1769
8a4a83ed 1770 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
474eccc6
ILT
1771 {
1772 i = pv->order;
1773 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1774 nodes[i].kind = ORDER_VAR;
1775 nodes[i].u.v = pv;
1776 }
1777
1778 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1779 {
1780 i = pa->order;
1781 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1782 nodes[i].kind = ORDER_ASM;
1783 nodes[i].u.a = pa;
1784 }
474eccc6 1785
7386e3ee
JH
1786 /* In toplevel reorder mode we output all statics; mark them as needed. */
1787 for (i = 0; i < max; ++i)
1788 {
1789 if (nodes[i].kind == ORDER_VAR)
1790 {
1791 varpool_mark_needed_node (nodes[i].u.v);
1792 }
1793 }
1794 varpool_empty_needed_queue ();
1795
474eccc6
ILT
1796 for (i = 0; i < max; ++i)
1797 {
1798 switch (nodes[i].kind)
1799 {
1800 case ORDER_FUNCTION:
257eb6e3 1801 nodes[i].u.f->process = 0;
474eccc6
ILT
1802 cgraph_expand_function (nodes[i].u.f);
1803 break;
1804
1805 case ORDER_VAR:
8a4a83ed 1806 varpool_assemble_decl (nodes[i].u.v);
474eccc6
ILT
1807 break;
1808
1809 case ORDER_ASM:
1810 assemble_asm (nodes[i].u.a->asm_str);
1811 break;
1812
1813 case ORDER_UNDEFINED:
1814 break;
1815
1816 default:
1817 gcc_unreachable ();
1818 }
1819 }
e7b9eb2c
ILT
1820
1821 cgraph_asm_nodes = NULL;
33283dad 1822 free (nodes);
474eccc6
ILT
1823}
1824
18c6ada9
JH
1825/* Return true when function body of DECL still needs to be kept around
1826 for later re-use. */
1827bool
1828cgraph_preserve_function_body_p (tree decl)
1829{
1830 struct cgraph_node *node;
c37f4ba4
JH
1831
1832 gcc_assert (cgraph_global_info_ready);
18c6ada9 1833 /* Look if there is any clone around. */
9187e02d
JH
1834 node = cgraph_node (decl);
1835 if (node->clones)
1836 return true;
18c6ada9
JH
1837 return false;
1838}
1839
ef330312
PB
1840static void
1841ipa_passes (void)
1842{
db2960f4 1843 set_cfun (NULL);
04b201a2 1844 current_function_decl = NULL;
726a989a 1845 gimple_register_cfg_hooks ();
ef330312 1846 bitmap_obstack_initialize (NULL);
b20996ff 1847
090fa0ab
GF
1848 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1849
b20996ff
JH
1850 if (!in_lto_p)
1851 execute_ipa_pass_list (all_small_ipa_passes);
3baf459d 1852
d7f09764
DN
1853 /* If pass_all_early_optimizations was not scheduled, the state of
1854 the cgraph will not be properly updated. Update it now. */
1855 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1856 cgraph_state = CGRAPH_STATE_IPA_SSA;
3baf459d 1857
d7f09764
DN
1858 if (!in_lto_p)
1859 {
1860 /* Generate coverage variables and constructors. */
1861 coverage_finish ();
1862
1863 /* Process new functions added. */
1864 set_cfun (NULL);
1865 current_function_decl = NULL;
1866 cgraph_process_new_functions ();
d7f09764 1867
090fa0ab
GF
1868 execute_ipa_summary_passes
1869 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
fb3f88cc 1870 }
c082f9f3
SB
1871
1872 /* Some targets need to handle LTO assembler output specially. */
1873 if (flag_generate_lto)
1874 targetm.asm_out.lto_start ();
1875
d7f09764
DN
1876 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1877
1878 if (!in_lto_p)
1879 ipa_write_summaries ();
1880
c082f9f3
SB
1881 if (flag_generate_lto)
1882 targetm.asm_out.lto_end ();
1883
fb3f88cc
JH
1884 if (!flag_ltrans)
1885 execute_ipa_pass_list (all_regular_ipa_passes);
090fa0ab 1886 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
3baf459d 1887
ef330312
PB
1888 bitmap_obstack_release (NULL);
1889}
1890
4537ec0c 1891
1c4a429a
JH
1892/* Perform simple optimizations based on callgraph. */
1893
d7f09764 1894void
db0e878d 1895cgraph_optimize (void)
1c4a429a 1896{
1da2ed5f 1897 if (seen_error ())
413803d3
VR
1898 return;
1899
18c6ada9
JH
1900#ifdef ENABLE_CHECKING
1901 verify_cgraph ();
1902#endif
7be82279 1903
cd9c7bd2
JH
1904 /* Frontend may output common variables after the unit has been finalized.
1905 It is safe to deal with them here as they are always zero initialized. */
8a4a83ed 1906 varpool_analyze_pending_decls ();
857e7259 1907
a194aa56 1908 timevar_push (TV_CGRAPHOPT);
a5573239
JH
1909 if (pre_ipa_mem_report)
1910 {
1911 fprintf (stderr, "Memory consumption before IPA\n");
1912 dump_memory_report (false);
1913 }
b58b1157 1914 if (!quiet_flag)
a418679d 1915 fprintf (stderr, "Performing interprocedural optimizations\n");
f45e0ad1 1916 cgraph_state = CGRAPH_STATE_IPA;
f30cfcb1 1917
7e2fe9d8 1918 /* Don't run the IPA passes if there was any error or sorry messages. */
1da2ed5f 1919 if (!seen_error ())
7e2fe9d8
AP
1920 ipa_passes ();
1921
4537ec0c 1922 /* Do nothing else if any IPA pass found errors. */
1da2ed5f 1923 if (seen_error ())
9ba0399e
RH
1924 {
1925 timevar_pop (TV_CGRAPHOPT);
1926 return;
1927 }
4537ec0c 1928
6b02a499
JH
1929 /* This pass remove bodies of extern inline functions we never inlined.
1930 Do this later so other IPA passes see what is really going on. */
1931 cgraph_remove_unreachable_nodes (false, dump_file);
dafc5b82 1932 cgraph_global_info_ready = true;
a194aa56
JH
1933 if (cgraph_dump_file)
1934 {
7d82fe7c 1935 fprintf (cgraph_dump_file, "Optimized ");
a194aa56 1936 dump_cgraph (cgraph_dump_file);
cd9c7bd2 1937 dump_varpool (cgraph_dump_file);
a194aa56 1938 }
a5573239
JH
1939 if (post_ipa_mem_report)
1940 {
7fa982e5 1941 fprintf (stderr, "Memory consumption after IPA\n");
a5573239
JH
1942 dump_memory_report (false);
1943 }
a194aa56 1944 timevar_pop (TV_CGRAPHOPT);
1c4a429a 1945
b58b1157 1946 /* Output everything. */
3df9609a 1947 (*debug_hooks->assembly_start) ();
7d82fe7c
KC
1948 if (!quiet_flag)
1949 fprintf (stderr, "Assembling functions:\n");
18c6ada9
JH
1950#ifdef ENABLE_CHECKING
1951 verify_cgraph ();
1952#endif
474eccc6 1953
9187e02d 1954 cgraph_materialize_all_clones ();
6674a6ce 1955 cgraph_mark_functions_to_output ();
cd9c7bd2 1956
f45e0ad1 1957 cgraph_state = CGRAPH_STATE_EXPANSION;
474eccc6
ILT
1958 if (!flag_toplevel_reorder)
1959 cgraph_output_in_order ();
1960 else
1961 {
1962 cgraph_output_pending_asms ();
1963
1964 cgraph_expand_all_functions ();
8a4a83ed 1965 varpool_remove_unreferenced_decls ();
474eccc6 1966
8a4a83ed 1967 varpool_assemble_pending_decls ();
474eccc6 1968 }
f45e0ad1
JH
1969 cgraph_process_new_functions ();
1970 cgraph_state = CGRAPH_STATE_FINISHED;
cd9c7bd2 1971
a194aa56
JH
1972 if (cgraph_dump_file)
1973 {
7d82fe7c 1974 fprintf (cgraph_dump_file, "\nFinal ");
a194aa56
JH
1975 dump_cgraph (cgraph_dump_file);
1976 }
18c6ada9
JH
1977#ifdef ENABLE_CHECKING
1978 verify_cgraph ();
6de9cd9a
DN
1979 /* Double check that all inline clones are gone and that all
1980 function bodies have been released from memory. */
1da2ed5f 1981 if (!seen_error ())
6de9cd9a
DN
1982 {
1983 struct cgraph_node *node;
1984 bool error_found = false;
1985
1986 for (node = cgraph_nodes; node; node = node->next)
1987 if (node->analyzed
1988 && (node->global.inlined_to
39ecc018 1989 || gimple_has_body_p (node->decl)))
6de9cd9a
DN
1990 {
1991 error_found = true;
1992 dump_cgraph_node (stderr, node);
c22cacf3 1993 }
6de9cd9a 1994 if (error_found)
f30cfcb1 1995 internal_error ("nodes with unreleased memory found");
6de9cd9a 1996 }
18c6ada9 1997#endif
1c4a429a 1998}
4537ec0c
DN
1999
2000
873c7164
MM
2001/* Generate and emit a static constructor or destructor. WHICH must
2002 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
2003 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
fa10beec 2004 initialization priority for this constructor or destructor. */
c9b9aa64
RH
2005
2006void
35b6fdcf 2007cgraph_build_static_cdtor (char which, tree body, int priority)
c9b9aa64
RH
2008{
2009 static int counter = 0;
2010 char which_buf[16];
b785f485 2011 tree decl, name, resdecl;
c9b9aa64 2012
873c7164
MM
2013 /* The priority is encoded in the constructor or destructor name.
2014 collect2 will sort the names and arrange that they are called at
2015 program startup. */
2016 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
5880f14f 2017 name = get_file_function_name (which_buf);
c9b9aa64 2018
c2255bc4 2019 decl = build_decl (input_location, FUNCTION_DECL, name,
c9b9aa64
RH
2020 build_function_type (void_type_node, void_list_node));
2021 current_function_decl = decl;
2022
c2255bc4
AH
2023 resdecl = build_decl (input_location,
2024 RESULT_DECL, NULL_TREE, void_type_node);
b785f485 2025 DECL_ARTIFICIAL (resdecl) = 1;
b785f485 2026 DECL_RESULT (decl) = resdecl;
07485407 2027 DECL_CONTEXT (resdecl) = decl;
b785f485 2028
182e0d71 2029 allocate_struct_function (decl, false);
c9b9aa64
RH
2030
2031 TREE_STATIC (decl) = 1;
2032 TREE_USED (decl) = 1;
2033 DECL_ARTIFICIAL (decl) = 1;
c9b9aa64
RH
2034 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
2035 DECL_SAVED_TREE (decl) = body;
b932b8b1
JDA
2036 if (!targetm.have_ctors_dtors)
2037 {
2038 TREE_PUBLIC (decl) = 1;
2039 DECL_PRESERVE_P (decl) = 1;
2040 }
c9b9aa64
RH
2041 DECL_UNINLINABLE (decl) = 1;
2042
2043 DECL_INITIAL (decl) = make_node (BLOCK);
2044 TREE_USED (DECL_INITIAL (decl)) = 1;
2045
2046 DECL_SOURCE_LOCATION (decl) = input_location;
2047 cfun->function_end_locus = input_location;
2048
341c100f
NS
2049 switch (which)
2050 {
2051 case 'I':
2052 DECL_STATIC_CONSTRUCTOR (decl) = 1;
395a40e0 2053 decl_init_priority_insert (decl, priority);
341c100f
NS
2054 break;
2055 case 'D':
2056 DECL_STATIC_DESTRUCTOR (decl) = 1;
395a40e0 2057 decl_fini_priority_insert (decl, priority);
341c100f
NS
2058 break;
2059 default:
2060 gcc_unreachable ();
2061 }
c9b9aa64
RH
2062
2063 gimplify_function_tree (decl);
2064
f45e0ad1
JH
2065 cgraph_add_new_function (decl, false);
2066 cgraph_mark_needed_node (cgraph_node (decl));
cac67c08 2067 set_cfun (NULL);
c9b9aa64 2068}
9b3e897d
PB
2069
2070void
2071init_cgraph (void)
2072{
2073 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2074}
57fb5341 2075
c22cacf3 2076/* The edges representing the callers of the NEW_VERSION node were
57fb5341
RL
2077 fixed by cgraph_function_versioning (), now the call_expr in their
2078 respective tree code should be updated to call the NEW_VERSION. */
2079
2080static void
2081update_call_expr (struct cgraph_node *new_version)
2082{
2083 struct cgraph_edge *e;
2084
2085 gcc_assert (new_version);
726a989a
RB
2086
2087 /* Update the call expr on the edges to call the new version. */
57fb5341 2088 for (e = new_version->callers; e; e = e->next_caller)
c0ab1df3
AP
2089 {
2090 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2091 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1d65f45c 2092 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
c0ab1df3 2093 }
57fb5341
RL
2094}
2095
2096
2097/* Create a new cgraph node which is the new version of
2098 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2099 edges which should be redirected to point to
2100 NEW_VERSION. ALL the callees edges of OLD_VERSION
2101 are cloned to the new version node. Return the new
91382288
JH
2102 version node.
2103
2104 If non-NULL BLOCK_TO_COPY determine what basic blocks
2105 was copied to prevent duplications of calls that are dead
2106 in the clone. */
57fb5341
RL
2107
2108static struct cgraph_node *
2109cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
b2c0ad40 2110 tree new_decl,
91382288
JH
2111 VEC(cgraph_edge_p,heap) *redirect_callers,
2112 bitmap bbs_to_copy)
2113 {
57fb5341 2114 struct cgraph_node *new_version;
ae2b0888 2115 struct cgraph_edge *e;
57fb5341
RL
2116 unsigned i;
2117
2118 gcc_assert (old_version);
c22cacf3 2119
57fb5341
RL
2120 new_version = cgraph_node (new_decl);
2121
2122 new_version->analyzed = true;
2123 new_version->local = old_version->local;
036546e5
JH
2124 new_version->local.externally_visible = false;
2125 new_version->local.local = true;
2126 new_version->local.vtable_method = false;
57fb5341 2127 new_version->global = old_version->global;
8cf9feca 2128 new_version->rtl = old_version->rtl;
57fb5341
RL
2129 new_version->reachable = true;
2130 new_version->count = old_version->count;
2131
036546e5 2132 for (e = old_version->callees; e; e=e->next_callee)
91382288
JH
2133 if (!bbs_to_copy
2134 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2135 cgraph_clone_edge (e, new_version, e->call_stmt,
2136 e->lto_stmt_uid, REG_BR_PROB_BASE,
2137 CGRAPH_FREQ_BASE,
2138 e->loop_nest, true);
036546e5 2139 for (e = old_version->indirect_calls; e; e=e->next_callee)
91382288
JH
2140 if (!bbs_to_copy
2141 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2142 cgraph_clone_edge (e, new_version, e->call_stmt,
2143 e->lto_stmt_uid, REG_BR_PROB_BASE,
2144 CGRAPH_FREQ_BASE,
2145 e->loop_nest, true);
b2c0ad40
KH
2146 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2147 {
2148 /* Redirect calls to the old version node to point to its new
2149 version. */
2150 cgraph_redirect_edge_callee (e, new_version);
2151 }
57fb5341
RL
2152
2153 return new_version;
2154 }
2155
2156 /* Perform function versioning.
c22cacf3 2157 Function versioning includes copying of the tree and
57fb5341
RL
2158 a callgraph update (creating a new cgraph node and updating
2159 its callees and callers).
2160
2161 REDIRECT_CALLERS varray includes the edges to be redirected
2162 to the new version.
2163
2164 TREE_MAP is a mapping of tree nodes we want to replace with
2165 new ones (according to results of prior analysis).
2166 OLD_VERSION_NODE is the node that is versioned.
b8698a0f 2167 It returns the new version's cgraph node.
91382288
JH
2168 If non-NULL ARGS_TO_SKIP determine function parameters to remove
2169 from new version.
2170 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
2171 If non_NULL NEW_ENTRY determine new entry BB of the clone. */
57fb5341
RL
2172
2173struct cgraph_node *
2174cgraph_function_versioning (struct cgraph_node *old_version_node,
b2c0ad40 2175 VEC(cgraph_edge_p,heap) *redirect_callers,
9187e02d 2176 VEC (ipa_replace_map_p,gc)* tree_map,
036546e5 2177 bitmap args_to_skip,
91382288
JH
2178 bitmap bbs_to_copy,
2179 basic_block new_entry_block,
036546e5 2180 const char *clone_name)
57fb5341
RL
2181{
2182 tree old_decl = old_version_node->decl;
2183 struct cgraph_node *new_version_node = NULL;
2184 tree new_decl;
2185
2186 if (!tree_versionable_function_p (old_decl))
2187 return NULL;
2188
2189 /* Make a new FUNCTION_DECL tree node for the
2190 new version. */
c6f7cfc1
JH
2191 if (!args_to_skip)
2192 new_decl = copy_node (old_decl);
2193 else
2194 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
57fb5341 2195
9990e02a
JH
2196 /* Generate a new name for the new version. */
2197 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2198 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2199 SET_DECL_RTL (new_decl, NULL);
2200
57fb5341
RL
2201 /* Create the new version's call-graph node.
2202 and update the edges of the new node. */
2203 new_version_node =
2204 cgraph_copy_node_for_versioning (old_version_node, new_decl,
91382288 2205 redirect_callers, bbs_to_copy);
57fb5341
RL
2206
2207 /* Copy the OLD_VERSION_NODE function tree to the new version. */
91382288
JH
2208 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2209 bbs_to_copy, new_entry_block);
57fb5341 2210
c22cacf3 2211 /* Update the new version's properties.
c0ab1df3
AP
2212 Make The new version visible only within this translation unit. Make sure
2213 that is not weak also.
c22cacf3 2214 ??? We cannot use COMDAT linkage because there is no
57fb5341 2215 ABI support for this. */
715a4e08 2216 cgraph_make_decl_local (new_version_node->decl);
e6e1c050 2217 DECL_VIRTUAL_P (new_version_node->decl) = 0;
57fb5341
RL
2218 new_version_node->local.externally_visible = 0;
2219 new_version_node->local.local = 1;
2220 new_version_node->lowered = true;
e6e1c050 2221
c0ab1df3
AP
2222 /* Update the call_expr on the edges to call the new version node. */
2223 update_call_expr (new_version_node);
b8698a0f 2224
129a37fc 2225 cgraph_call_function_insertion_hooks (new_version_node);
57fb5341
RL
2226 return new_version_node;
2227}
ea99e0be
JH
2228
2229/* Produce separate function body for inline clones so the offline copy can be
2230 modified without affecting them. */
2231struct cgraph_node *
2232save_inline_function_body (struct cgraph_node *node)
2233{
9187e02d 2234 struct cgraph_node *first_clone, *n;
ea99e0be
JH
2235
2236 gcc_assert (node == cgraph_node (node->decl));
2237
2238 cgraph_lower_function (node);
2239
9187e02d 2240 first_clone = node->clones;
ea99e0be
JH
2241
2242 first_clone->decl = copy_node (node->decl);
ea99e0be
JH
2243 cgraph_insert_node_to_hashtable (first_clone);
2244 gcc_assert (first_clone == cgraph_node (first_clone->decl));
9187e02d
JH
2245 if (first_clone->next_sibling_clone)
2246 {
2247 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2248 n->clone_of = first_clone;
2249 n->clone_of = first_clone;
2250 n->next_sibling_clone = first_clone->clones;
2251 if (first_clone->clones)
2252 first_clone->clones->prev_sibling_clone = n;
2253 first_clone->clones = first_clone->next_sibling_clone;
2254 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2255 first_clone->next_sibling_clone = NULL;
2256 gcc_assert (!first_clone->prev_sibling_clone);
2257 }
2258 first_clone->clone_of = NULL;
2259 node->clones = NULL;
2260
2261 if (first_clone->clones)
2262 for (n = first_clone->clones; n != first_clone;)
2263 {
2264 gcc_assert (n->decl == node->decl);
2265 n->decl = first_clone->decl;
2266 if (n->clones)
2267 n = n->clones;
2268 else if (n->next_sibling_clone)
2269 n = n->next_sibling_clone;
2270 else
2271 {
2272 while (n != first_clone && !n->next_sibling_clone)
2273 n = n->clone_of;
2274 if (n != first_clone)
2275 n = n->next_sibling_clone;
2276 }
2277 }
ea99e0be
JH
2278
2279 /* Copy the OLD_VERSION_NODE function tree to the new version. */
91382288
JH
2280 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2281 NULL, NULL);
ea99e0be
JH
2282
2283 DECL_EXTERNAL (first_clone->decl) = 0;
fc26fae3 2284 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
ea99e0be
JH
2285 TREE_PUBLIC (first_clone->decl) = 0;
2286 DECL_COMDAT (first_clone->decl) = 0;
21ecdec5 2287 VEC_free (ipa_opt_pass, heap,
0e3776db
JH
2288 first_clone->ipa_transforms_to_apply);
2289 first_clone->ipa_transforms_to_apply = NULL;
ea99e0be 2290
ea99e0be
JH
2291#ifdef ENABLE_CHECKING
2292 verify_cgraph_node (first_clone);
2293#endif
2294 return first_clone;
2295}
7be82279 2296
9187e02d
JH
2297/* Given virtual clone, turn it into actual clone. */
2298static void
2299cgraph_materialize_clone (struct cgraph_node *node)
2300{
2301 bitmap_obstack_initialize (NULL);
e466e2ce
JH
2302#ifdef ENABLE_CHECKING
2303 node->former_clone_of = node->clone_of->decl;
2304 if (node->clone_of->former_clone_of)
2305 node->former_clone_of = node->clone_of->former_clone_of;
2306#endif
9187e02d
JH
2307 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2308 tree_function_versioning (node->clone_of->decl, node->decl,
2309 node->clone.tree_map, true,
91382288 2310 node->clone.args_to_skip, NULL, NULL);
08ad1d6d
JH
2311 if (cgraph_dump_file)
2312 {
2313 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2314 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2315 }
9187e02d
JH
2316
2317 /* Function is no longer clone. */
2318 if (node->next_sibling_clone)
2319 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2320 if (node->prev_sibling_clone)
2321 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2322 else
2323 node->clone_of->clones = node->next_sibling_clone;
2324 node->next_sibling_clone = NULL;
2325 node->prev_sibling_clone = NULL;
0e3776db 2326 if (!node->clone_of->analyzed && !node->clone_of->clones)
f0c418dc
JH
2327 {
2328 cgraph_release_function_body (node->clone_of);
2329 cgraph_node_remove_callees (node->clone_of);
2330 ipa_remove_all_references (&node->clone_of->ref_list);
2331 }
9187e02d
JH
2332 node->clone_of = NULL;
2333 bitmap_obstack_release (NULL);
2334}
2335
8132a837
MJ
2336/* If necessary, change the function declaration in the call statement
2337 associated with E so that it corresponds to the edge callee. */
2338
2339gimple
2340cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2341{
2342 tree decl = gimple_call_fndecl (e->call_stmt);
2343 gimple new_stmt;
437ffe7b
JH
2344#ifdef ENABLE_CHECKING
2345 struct cgraph_node *node;
2346#endif
8132a837
MJ
2347
2348 if (!decl || decl == e->callee->decl
2349 /* Don't update call from same body alias to the real function. */
2350 || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
2351 return e->call_stmt;
2352
437ffe7b
JH
2353#ifdef ENABLE_CHECKING
2354 node = cgraph_get_node (decl);
2355 gcc_assert (!node || !node->clone.combined_args_to_skip);
2356#endif
e466e2ce 2357
8132a837
MJ
2358 if (cgraph_dump_file)
2359 {
2360 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2361 cgraph_node_name (e->caller), e->caller->uid,
2362 cgraph_node_name (e->callee), e->callee->uid);
2363 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e466e2ce 2364 if (e->callee->clone.combined_args_to_skip)
8d2adc24
EB
2365 {
2366 fprintf (cgraph_dump_file, " combined args to skip: ");
2367 dump_bitmap (cgraph_dump_file,
2368 e->callee->clone.combined_args_to_skip);
e466e2ce 2369 }
8132a837
MJ
2370 }
2371
2372 if (e->callee->clone.combined_args_to_skip)
8d2adc24
EB
2373 {
2374 gimple_stmt_iterator gsi;
2375
2376 new_stmt
2377 = gimple_call_copy_skip_args (e->call_stmt,
2378 e->callee->clone.combined_args_to_skip);
2379
2380 if (gimple_vdef (new_stmt)
2381 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2382 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2383
2384 gsi = gsi_for_stmt (e->call_stmt);
2385 gsi_replace (&gsi, new_stmt, true);
2386 }
8132a837
MJ
2387 else
2388 new_stmt = e->call_stmt;
8132a837 2389
8d2adc24 2390 gimple_call_set_fndecl (new_stmt, e->callee->decl);
0b6e2868 2391 update_stmt (new_stmt);
8132a837 2392
8132a837
MJ
2393 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2394
2395 if (cgraph_dump_file)
2396 {
2397 fprintf (cgraph_dump_file, " updated to:");
2398 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2399 }
2400 return new_stmt;
2401}
2402
9187e02d 2403/* Once all functions from compilation unit are in memory, produce all clones
8132a837
MJ
2404 and update all calls. We might also do this on demand if we don't want to
2405 bring all functions to memory prior compilation, but current WHOPR
2406 implementation does that and it is is bit easier to keep everything right in
2407 this order. */
9187e02d
JH
2408void
2409cgraph_materialize_all_clones (void)
2410{
2411 struct cgraph_node *node;
2412 bool stabilized = false;
2413
2414 if (cgraph_dump_file)
2415 fprintf (cgraph_dump_file, "Materializing clones\n");
2416#ifdef ENABLE_CHECKING
2417 verify_cgraph ();
2418#endif
2419
2420 /* We can also do topological order, but number of iterations should be
2421 bounded by number of IPA passes since single IPA pass is probably not
2422 going to create clones of clones it created itself. */
2423 while (!stabilized)
2424 {
2425 stabilized = true;
2426 for (node = cgraph_nodes; node; node = node->next)
2427 {
2428 if (node->clone_of && node->decl != node->clone_of->decl
2429 && !gimple_has_body_p (node->decl))
2430 {
2431 if (gimple_has_body_p (node->clone_of->decl))
2432 {
2433 if (cgraph_dump_file)
08ad1d6d
JH
2434 {
2435 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2436 cgraph_node_name (node->clone_of),
2437 cgraph_node_name (node));
2438 if (node->clone.tree_map)
2439 {
2440 unsigned int i;
2441 fprintf (cgraph_dump_file, " replace map: ");
2442 for (i = 0; i < VEC_length (ipa_replace_map_p,
2443 node->clone.tree_map);
2444 i++)
2445 {
2446 struct ipa_replace_map *replace_info;
2447 replace_info = VEC_index (ipa_replace_map_p,
2448 node->clone.tree_map,
2449 i);
2450 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2451 fprintf (cgraph_dump_file, " -> ");
2452 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2453 fprintf (cgraph_dump_file, "%s%s;",
2454 replace_info->replace_p ? "(replace)":"",
2455 replace_info->ref_p ? "(ref)":"");
2456 }
2457 fprintf (cgraph_dump_file, "\n");
2458 }
2459 if (node->clone.args_to_skip)
2460 {
2461 fprintf (cgraph_dump_file, " args_to_skip: ");
2462 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2463 }
2464 if (node->clone.args_to_skip)
2465 {
2466 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2467 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2468 }
2469 }
9187e02d 2470 cgraph_materialize_clone (node);
36576655 2471 stabilized = false;
9187e02d 2472 }
9187e02d
JH
2473 }
2474 }
2475 }
47cb0d7d
JH
2476 for (node = cgraph_nodes; node; node = node->next)
2477 if (!node->analyzed && node->callees)
2478 cgraph_node_remove_callees (node);
8132a837
MJ
2479 if (cgraph_dump_file)
2480 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
9a23acef
JH
2481#ifdef ENABLE_CHECKING
2482 verify_cgraph ();
2483#endif
9187e02d
JH
2484 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2485}
2486
7be82279 2487#include "gt-cgraphunit.h"