]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
2009-12-10 Daniel Franke <franke.daniel@gmail.com>
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
cd6bca02 1/* Callgraph based interprocedural optimizations.
cfaf579d 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
6329636b 48 In the call-graph construction and local function
b0cdf642 49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
6329636b 80 Analyzing of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
6329636b 106 where reference has been optimized out. */
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
f79b6507 126#include "timevar.h"
d7c6d889 127#include "params.h"
128#include "fibheap.h"
611e5405 129#include "intl.h"
b69eb0ff 130#include "function.h"
b5d36404 131#include "ipa-prop.h"
75a70cf9 132#include "gimple.h"
133#include "tree-iterator.h"
f1e2a033 134#include "tree-pass.h"
bfec3452 135#include "tree-dump.h"
c1dcd13c 136#include "output.h"
9ed5b1f5 137#include "coverage.h"
c9036234 138#include "plugin.h"
d7c6d889 139
a6868229 140static void cgraph_expand_all_functions (void);
d9d9733a 141static void cgraph_mark_functions_to_output (void);
142static void cgraph_expand_function (struct cgraph_node *);
f788fff2 143static void cgraph_output_pending_asms (void);
bfec3452 144static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 145
121f3051 146static FILE *cgraph_dump_file;
147
2c56f72e 148/* A vector of FUNCTION_DECLs declared as static constructors. */
149static GTY (()) VEC(tree, gc) *static_ctors;
150/* A vector of FUNCTION_DECLs declared as static destructors. */
151static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 152
28454517 153/* Used for vtable lookup in thunk adjusting. */
154static GTY (()) tree vtable_entry_type;
155
a861fe52 156/* When target does not have ctors and dtors, we call all constructor
310d2511 157 and destructor by special initialization/destruction function
48e1416a 158 recognized by collect2.
159
a861fe52 160 When we are going to build this function, collect all constructors and
161 destructors and turn them into normal functions. */
162
163static void
164record_cdtor_fn (tree fndecl)
165{
2de29097 166 struct cgraph_node *node;
167 if (targetm.have_ctors_dtors
168 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 170 return;
171
172 if (DECL_STATIC_CONSTRUCTOR (fndecl))
173 {
2c56f72e 174 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 175 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 176 }
177 if (DECL_STATIC_DESTRUCTOR (fndecl))
178 {
2c56f72e 179 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 180 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 181 }
2de29097 182 node = cgraph_node (fndecl);
183 node->local.disregard_inline_limits = 1;
184 cgraph_mark_reachable_node (node);
a861fe52 185}
186
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.

   For each run of CDTORS sharing one priority, one synthetic function
   calling them all is emitted via cgraph_build_static_cdtor.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[i];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  /* BODY is still empty for the first function of a batch; its
	     priority defines the batch.  A later function with a
	     different priority ends the batch (and is left for the
	     next iteration of the outer loop).  */
	  if (!body)
	    priority = p;
	  else if (p != priority)
	    break;
	  append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
							      fn, 0),
				    &body);
	  ++i;
	}
      while (i < len);
      /* The outer loop guarantees at least one call was appended.  */
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
229
230/* Comparison function for qsort. P1 and P2 are actually of type
231 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
232 used to determine the sort order. */
a861fe52 233
2c56f72e 234static int
235compare_ctor (const void *p1, const void *p2)
236{
237 tree f1;
238 tree f2;
239 int priority1;
240 int priority2;
241
242 f1 = *(const tree *)p1;
243 f2 = *(const tree *)p2;
244 priority1 = DECL_INIT_PRIORITY (f1);
245 priority2 = DECL_INIT_PRIORITY (f2);
48e1416a 246
2c56f72e 247 if (priority1 < priority2)
248 return -1;
249 else if (priority1 > priority2)
250 return 1;
251 else
252 /* Ensure a stable sort. */
253 return (const tree *)p1 - (const tree *)p2;
254}
255
256/* Comparison function for qsort. P1 and P2 are actually of type
257 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
258 used to determine the sort order. */
a861fe52 259
2c56f72e 260static int
261compare_dtor (const void *p1, const void *p2)
262{
263 tree f1;
264 tree f2;
265 int priority1;
266 int priority2;
267
268 f1 = *(const tree *)p1;
269 f2 = *(const tree *)p2;
270 priority1 = DECL_FINI_PRIORITY (f1);
271 priority2 = DECL_FINI_PRIORITY (f2);
48e1416a 272
2c56f72e 273 if (priority1 < priority2)
274 return -1;
275 else if (priority1 > priority2)
276 return 1;
277 else
278 /* Ensure a stable sort. */
279 return (const tree *)p1 - (const tree *)p2;
a861fe52 280}
281
282/* Generate functions to call static constructors and destructors
283 for targets that do not support .ctors/.dtors sections. These
284 functions have magic names which are detected by collect2. */
285
286static void
287cgraph_build_cdtor_fns (void)
288{
2c56f72e 289 if (!VEC_empty (tree, static_ctors))
a861fe52 290 {
2c56f72e 291 gcc_assert (!targetm.have_ctors_dtors);
292 qsort (VEC_address (tree, static_ctors),
48e1416a 293 VEC_length (tree, static_ctors),
2c56f72e 294 sizeof (tree),
295 compare_ctor);
296 build_cdtor (/*ctor_p=*/true,
297 VEC_address (tree, static_ctors),
48e1416a 298 VEC_length (tree, static_ctors));
2c56f72e 299 VEC_truncate (tree, static_ctors, 0);
a861fe52 300 }
2c56f72e 301
302 if (!VEC_empty (tree, static_dtors))
a861fe52 303 {
2c56f72e 304 gcc_assert (!targetm.have_ctors_dtors);
305 qsort (VEC_address (tree, static_dtors),
48e1416a 306 VEC_length (tree, static_dtors),
2c56f72e 307 sizeof (tree),
308 compare_dtor);
309 build_cdtor (/*ctor_p=*/false,
310 VEC_address (tree, static_dtors),
48e1416a 311 VEC_length (tree, static_dtors));
2c56f72e 312 VEC_truncate (tree, static_dtors, 0);
a861fe52 313 }
314}
315
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  NODE is the cgraph node for DECL.  Returns true when the
   function must be output regardless of whether any call to it is
   seen.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
     return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These was optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.

     NOTE(review): under -flto/-fwhopr/-fwhole-program even public
     functions are not forced out here; whole-program analysis decides
     later.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !node->origin))
       && !flag_whole_program
       && !flag_lto
       && !flag_whopr)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}
373
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   How much work is needed depends on how far compilation has
   progressed (cgraph_state).  Returns true if at least one function
   was queued during the construction phase (i.e. something new still
   needs output).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      /* Pop NODE off the queue before processing; processing may push
	 further nodes onto cgraph_new_nodes.  */
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  /* Dominance info computed by the passes above is not valid
	     for code added later; drop it.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Let IPA passes that registered insertion hooks see NODE.  */
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}
443
9b8fb23a 444/* As an GCC extension we allow redefinition of the function. The
445 semantics when both copies of bodies differ is not well defined.
446 We replace the old body with new body so in unit at a time mode
447 we always use new body, while in normal mode we may end up with
448 old body inlined into some functions and new body expanded and
449 inlined in others.
450
451 ??? It may make more sense to use one body for inlining and other
452 body for expanding the function but this is difficult to do. */
453
454static void
455cgraph_reset_node (struct cgraph_node *node)
456{
09fc9532 457 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 458 This is *not* testing for whether we've already emitted the function.
459 That case can be sort-of legitimately seen with real function redefinition
460 errors. I would argue that the front end should never present us with
461 such a case, but don't enforce that for now. */
09fc9532 462 gcc_assert (!node->process);
9b8fb23a 463
464 /* Reset our data structures so we can analyze the function again. */
465 memset (&node->local, 0, sizeof (node->local));
466 memset (&node->global, 0, sizeof (node->global));
467 memset (&node->rtl, 0, sizeof (node->rtl));
468 node->analyzed = false;
469 node->local.redefined_extern_inline = true;
470 node->local.finalized = false;
471
9b8fb23a 472 cgraph_node_remove_callees (node);
473
474 /* We may need to re-queue the node for assembling in case
46beef9a 475 we already proceeded it and ignored as not needed or got
476 a re-declaration in IMA mode. */
477 if (node->reachable)
9b8fb23a 478 {
479 struct cgraph_node *n;
480
481 for (n = cgraph_nodes_queue; n; n = n->next_needed)
482 if (n == node)
483 break;
484 if (!n)
485 node->reachable = 0;
486 }
487}
c08871a9 488
1e8e9920 489static void
490cgraph_lower_function (struct cgraph_node *node)
491{
492 if (node->lowered)
493 return;
bfec3452 494
495 if (node->nested)
496 lower_nested_functions (node->decl);
497 gcc_assert (!node->nested);
498
1e8e9920 499 tree_lowering_passes (node->decl);
500 node->lowered = true;
501}
502
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* Redefinition: throw away everything learned about the old body.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* Front ends that build a CFG themselves hand us an
     already-lowered body.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;
  record_cdtor_fn (node->decl);

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}
543
0da03d11 544/* C99 extern inline keywords allow changing of declaration after function
545 has been finalized. We need to re-decide if we want to mark the function as
546 needed then. */
547
548void
549cgraph_mark_if_needed (tree decl)
550{
551 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 552 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 553 cgraph_mark_needed_node (node);
554}
555
ccf4ab6b 556/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
557static bool
558clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
559{
560 while (node != node2 && node2)
561 node2 = node2->clone_of;
562 return node2 != NULL;
563}
564
/* Verify cgraph nodes of given cgraph node.  Checks edge/aux
   invariants, profile counts and frequencies, inline-clone linkage,
   the clone tree, and (when a body is present) that call statements
   and call-graph edges match one-to-one.  Aborts via internal_error
   on any inconsistency.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* Do not verify when earlier errors already compromised the IL.  */
  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  /* e->aux must be clear outside of this verifier; it is used below as
     a "seen" marker while matching edges to call statements.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       identifier_to_locale (cgraph_node_name (e->caller)),
	       identifier_to_locale (cgraph_node_name (e->callee)));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  /* Inline clones are private copies; they must not be externally
     visible, addressable, or independently needed.  */
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("Externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("Inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("Inline clone is needed");
      error_found = true;
    }
  /* Check per-caller-edge invariants.  */
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
	{
	  error ("caller edge count is negative");
	  error_found = true;
	}
      if (e->frequency < 0)
	{
	  error ("caller edge frequency is negative");
	  error_found = true;
	}
      if (e->frequency > CGRAPH_FREQ_MAX)
	{
	  error ("caller edge frequency is too large");
	  error_found = true;
	}
      /* NOTE(review): "freqency" below is a typo in the diagnostic,
	 kept as-is since diagnostics are behavior.  */
      if (gimple_has_body_p (e->caller->decl)
	  && !e->caller->global.inlined_to
	  && (e->frequency
	      != compute_call_stmt_bb_frequency (e->caller->decl,
						 gimple_bb (e->call_stmt))))
	{
	  error ("caller edge frequency %i does not match BB freqency %i",
		 e->frequency,
		 compute_call_stmt_bb_frequency (e->caller->decl,
						 gimple_bb (e->call_stmt)));
	  error_found = true;
	}
      if (!e->inline_failed)
	{
	  /* An inlined edge: this node's inlined_to must point at the
	     root of the caller's inline tree, and it can have only one
	     caller.  */
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* Verify the clone tree: a clone must be on its clone_of's list, and
     every node on our clone list must point back to us.  */
  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
	if (n == node)
	  break;
      if (!n)
	{
	  error ("node has wrong clone_of");
	  error_found = true;
	}
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
	if (n->clone_of != node)
	  break;
      if (n)
	{
	  error ("node has wrong clone list");
	  error_found = true;
	}
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }

  /* When a gimple body is available (and not in WPA mode, where bodies
     live in other files), verify that call statements and callee edges
     correspond one-to-one.  */
  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
		 !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		tree decl;
		if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    if (e)
		      {
			/* e->aux already set means two statements map
			   to the same edge.  */
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_gimple_stmt (stmt);
			    error_found = true;
			  }
			if (e->callee->same_body_alias)
			  {
			    error ("edge points to same body alias:");
			    debug_tree (e->callee->decl);
			    error_found = true;
			  }
			else if (!clone_of_p (cgraph_node (decl), e->callee)
				 && !e->callee->global.inlined_to)
			  {
			    error ("edge points to wrong declaration:");
			    debug_tree (e->callee->decl);
			    fprintf (stderr," Instead of:");
			    debug_tree (decl);
			    error_found = true;
			  }
			e->aux = (void *)1;
		      }
		    else
		      {
			error ("missing callgraph edge for call stmt:");
			debug_gimple_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Any callee edge left unmarked has no matching call statement
	 (indirect calls excepted).  Clear the aux markers again.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux && !e->indirect_call)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)),
		     identifier_to_locale (cgraph_node_name (e->callee)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}
800
801/* Verify whole cgraph structure. */
802void
803verify_cgraph (void)
804{
805 struct cgraph_node *node;
806
8ec2a798 807 if (sorrycount || errorcount)
808 return;
809
b0cdf642 810 for (node = cgraph_nodes; node; node = node->next)
811 verify_cgraph_node (node);
812}
813
56af936e 814/* Output all asm statements we have stored up to be output. */
815
816static void
817cgraph_output_pending_asms (void)
818{
819 struct cgraph_asm_node *can;
820
821 if (errorcount || sorrycount)
822 return;
823
824 for (can = cgraph_asm_nodes; can; can = can->next)
825 assemble_asm (can->asm_str);
826 cgraph_asm_nodes = NULL;
827}
828
/* Analyze the function scheduled to be output: gimplify and lower
   NODE's body and mark it analyzed.  Saves and restores
   current_function_decl/cfun around the work.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}
853
05806473 854/* Look for externally_visible and used attributes and mark cgraph nodes
855 accordingly.
856
857 We cannot mark the nodes at the point the attributes are processed (in
858 handle_*_attribute) because the copy of the declarations available at that
859 point may not be canonical. For example, in:
860
861 void f();
862 void f() __attribute__((used));
863
864 the declaration we see in handle_used_attribute will be the second
865 declaration -- but the front end will subsequently merge that declaration
866 with the original declaration and discard the second declaration.
867
868 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
869
870 void f() {}
871 void f() __attribute__((externally_visible));
872
873 is valid.
874
875 So, we walk the nodes at the end of the translation unit, applying the
876 attributes at that point. */
877
878static void
879process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 880 struct varpool_node *first_var)
05806473 881{
882 struct cgraph_node *node;
1d416bd7 883 struct varpool_node *vnode;
05806473 884
885 for (node = cgraph_nodes; node != first; node = node->next)
886 {
887 tree decl = node->decl;
888 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
889 {
890 mark_decl_referenced (decl);
891 if (node->local.finalized)
892 cgraph_mark_needed_node (node);
893 }
894 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
895 {
ba12ea31 896 if (! TREE_PUBLIC (node->decl))
712d2297 897 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
898 "%<externally_visible%>"
899 " attribute have effect only on public objects");
59dd4830 900 else if (node->local.finalized)
901 cgraph_mark_needed_node (node);
05806473 902 }
903 }
1d416bd7 904 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 905 {
906 tree decl = vnode->decl;
907 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
908 {
909 mark_decl_referenced (decl);
22671757 910 vnode->force_output = true;
05806473 911 if (vnode->finalized)
1d416bd7 912 varpool_mark_needed_node (vnode);
05806473 913 }
914 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
915 {
ba12ea31 916 if (! TREE_PUBLIC (vnode->decl))
712d2297 917 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
918 "%<externally_visible%>"
919 " attribute have effect only on public objects");
59dd4830 920 else if (vnode->finalized)
921 varpool_mark_needed_node (vnode);
05806473 922 }
923 }
924}
925
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  /* Remember the heads of the node/variable lists so attribute processing
     is not repeated for nodes we have already seen.  */
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the needed queue.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Analysis creates the callee edges; pull every callee into the
	 worklist so it gets analyzed too.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body. The DECL_INITIAL() of that
	 abstract function declaration will be later needed to output debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove bodies of functions that turned out to be unreachable; only the
     nodes added since the previous call need to be considered.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
1046
8f69fd82 1047
/* Analyze the whole compilation unit once it is parsed completely.
   Called by the front end after the last toplevel declaration;
   drives gimplification, callgraph construction and, finally,
   cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  The first analysis pass may have created
     thunks/aliases, so the analysis is run a second time to pick them up.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
9ed5b1f5 1089
1090
/* Figure out what functions we want to assemble: set the PROCESS flag on
   every node whose body must be emitted by this translation unit.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process);

      /* After this loop E is non-NULL iff at least one caller did not
	 inline this function (i.e. an out-of-line copy is required).  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->process = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
1139
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form (CFG built, SSA enabled, no body statements yet).

   Set current_function_decl and cfun to newly constructed empty function body.
   Return the single basic block created for the function body; it is already
   connected to the entry and exit blocks.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* error_mark_node signals that the generic body is gone; the function
     lives only in lowered form from here on.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1172
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   New GIMPLE statements are emitted after *BSI; the return value is a
   temporary VAR_DECL holding the fully adjusted pointer.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk it is applied after
     (see the !this_adjusting branch below).  */
  if (this_adjusting)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the global type used for vtable entries
	 (pointer to function returning int).  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build1 (INDIRECT_REF,
					  TREE_TYPE (vtabletmp2), vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build1 (INDIRECT_REF,
					  TREE_TYPE (vtabletmp3),
					  vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  if (!this_adjusting)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1300
/* Produce assembler for thunk NODE.

   Two strategies are used: if the target can emit the thunk directly
   (asm_out.can_output_mi_thunk), let it do so; otherwise build a real
   GIMPLE function body that adjusts the `this' pointer (and possibly
   the result) and tail-calls the thunked-to function.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      /* Fast path: the target backend emits the thunk assembly itself.  */
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: synthesize a GIMPLE body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate result: reuse the RESULT_DECL itself.  */
	      restmp = resdecl;
	      cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
            restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = TREE_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') is adjusted before the call for a
	 this-adjusting thunk; all other arguments pass through untouched.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  /* Result-adjusting thunk: the returned pointer must be adjusted
	     after the call.  */
	  tree true_label = NULL_TREE, false_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      false_label = gimple_block_label (else_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					fold_convert (TREE_TYPE (restmp),
						      integer_zero_node),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      /* NULL came back from the call: return NULL unadjusted.  */
	      gimple stmt;
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
								integer_zero_node));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* The thunk is no longer a same-body alias: it now has a real body
	 and is compiled as an ordinary function.  */
      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      mark_decl_referenced (thunk_fndecl);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1480
/* Expand function specified by NODE: drive it through the RTL backend,
   emit its same-body aliases and thunks, then release the body.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Move to the tail of the same-body list first ...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* ... then walk aliases in the order they were created; it is
	 possible that thunks refers to the aliases made earlier.  */
      for (; alias; alias = next)
	{
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
    }
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1530
b0cdf642 1531/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1532
1533bool
326a9581 1534cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1535{
b0cdf642 1536 *reason = e->inline_failed;
1537 return !e->inline_failed;
d7c6d889 1538}
b0cdf642 1539
acc70efa 1540
acc70efa 1541
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.
     Compact the array down to nodes actually marked for output.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* Walk the postorder in reverse so callees are generally emitted
     before their callers.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->process)
	{
	  gcc_assert (node->reachable);
	  node->process = 0;
	  cgraph_expand_function (node);
	}
    }
  cgraph_process_new_functions ();

  free (order);

}
1584
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not used by any toplevel entity.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph (function) node.  */
  ORDER_VAR,		/* Slot holds a varpool (variable) node.  */
  ORDER_ASM		/* Slot holds a toplevel asm statement.  */
};

/* One toplevel entity, tagged by KIND; exactly one union member is valid.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
1605
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Every toplevel entity has a unique order number < cgraph_order, so a
     flat array indexed by order gives the original file order.  */
  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* With -fno-toplevel-reorder we output all statics; mark them all as
     needed, then drain the varpool queue so they are assembled by the
     loop below in file order rather than queue order.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
	{
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
}
1694
b0cdf642 1695/* Return true when function body of DECL still needs to be kept around
1696 for later re-use. */
1697bool
1698cgraph_preserve_function_body_p (tree decl)
1699{
1700 struct cgraph_node *node;
8d8c4c8d 1701
1702 gcc_assert (cgraph_global_info_ready);
b0cdf642 1703 /* Look if there is any clone around. */
ccf4ab6b 1704 node = cgraph_node (decl);
1705 if (node->clones)
1706 return true;
b0cdf642 1707 return false;
1708}
1709
/* Run the interprocedural (IPA) pass pipeline: small IPA passes, summary
   generation, LTO summary streaming, and the regular IPA passes, with
   plugin callbacks fired around the whole sequence.  Behavior differs
   between a normal compile, an LTO read-back (in_lto_p) and an ltrans
   unit (flag_ltrans).  */

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }
  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1752
34e5cced 1753
/* Perform simple optimizations based on callgraph.  Runs the IPA passes,
   removes unreachable nodes, then expands and assembles every function
   and variable that must be output.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
34e5cced 1861
1862
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* COUNTER makes each generated function name unique within the unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target ctor/dtor support the function must be visible to
     collect2, hence public.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}
121f3051 1927
1928void
1929init_cgraph (void)
1930{
1931 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1932}
b5d36404 1933
a0c938f0 1934/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1935 fixed by cgraph_function_versioning (), now the call_expr in their
1936 respective tree code should be updated to call the NEW_VERSION. */
1937
1938static void
1939update_call_expr (struct cgraph_node *new_version)
1940{
1941 struct cgraph_edge *e;
1942
1943 gcc_assert (new_version);
75a70cf9 1944
1945 /* Update the call expr on the edges to call the new version. */
b5d36404 1946 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1947 {
1948 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1949 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1950 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1951 }
b5d36404 1952}
1953
1954
1955/* Create a new cgraph node which is the new version of
1956 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1957 edges which should be redirected to point to
1958 NEW_VERSION. ALL the callees edges of OLD_VERSION
1959 are cloned to the new version node. Return the new
1960 version node. */
1961
1962static struct cgraph_node *
1963cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1964 tree new_decl,
1965 VEC(cgraph_edge_p,heap) *redirect_callers)
b5d36404 1966 {
1967 struct cgraph_node *new_version;
1968 struct cgraph_edge *e, *new_e;
1969 struct cgraph_edge *next_callee;
1970 unsigned i;
1971
1972 gcc_assert (old_version);
a0c938f0 1973
b5d36404 1974 new_version = cgraph_node (new_decl);
1975
1976 new_version->analyzed = true;
1977 new_version->local = old_version->local;
1978 new_version->global = old_version->global;
1979 new_version->rtl = new_version->rtl;
1980 new_version->reachable = true;
1981 new_version->count = old_version->count;
1982
1983 /* Clone the old node callees. Recursive calls are
1984 also cloned. */
1985 for (e = old_version->callees;e; e=e->next_callee)
1986 {
7bfefa9d 1987 new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
1988 e->lto_stmt_uid, 0, e->frequency,
4ae20857 1989 e->loop_nest, true);
b5d36404 1990 new_e->count = e->count;
1991 }
1992 /* Fix recursive calls.
1993 If OLD_VERSION has a recursive call after the
1994 previous edge cloning, the new version will have an edge
1995 pointing to the old version, which is wrong;
1996 Redirect it to point to the new version. */
1997 for (e = new_version->callees ; e; e = next_callee)
1998 {
1999 next_callee = e->next_callee;
2000 if (e->callee == old_version)
2001 cgraph_redirect_edge_callee (e, new_version);
a0c938f0 2002
b5d36404 2003 if (!next_callee)
2004 break;
2005 }
4460a647 2006 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2007 {
2008 /* Redirect calls to the old version node to point to its new
2009 version. */
2010 cgraph_redirect_edge_callee (e, new_version);
2011 }
b5d36404 2012
2013 return new_version;
2014 }
2015
2016 /* Perform function versioning.
a0c938f0 2017 Function versioning includes copying of the tree and
b5d36404 2018 a callgraph update (creating a new cgraph node and updating
2019 its callees and callers).
2020
2021 REDIRECT_CALLERS varray includes the edges to be redirected
2022 to the new version.
2023
2024 TREE_MAP is a mapping of tree nodes we want to replace with
2025 new ones (according to results of prior analysis).
2026 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2027 It returns the new version's cgraph node.
5afe38fe 2028 ARGS_TO_SKIP lists arguments to be omitted from functions
2029 */
b5d36404 2030
2031struct cgraph_node *
2032cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2033 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2034 VEC (ipa_replace_map_p,gc)* tree_map,
5afe38fe 2035 bitmap args_to_skip)
b5d36404 2036{
2037 tree old_decl = old_version_node->decl;
2038 struct cgraph_node *new_version_node = NULL;
2039 tree new_decl;
2040
2041 if (!tree_versionable_function_p (old_decl))
2042 return NULL;
2043
2044 /* Make a new FUNCTION_DECL tree node for the
2045 new version. */
5afe38fe 2046 if (!args_to_skip)
2047 new_decl = copy_node (old_decl);
2048 else
2049 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2050
2051 /* Create the new version's call-graph node.
2052 and update the edges of the new node. */
2053 new_version_node =
2054 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2055 redirect_callers);
2056
2057 /* Copy the OLD_VERSION_NODE function tree to the new version. */
5afe38fe 2058 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
b5d36404 2059
a0c938f0 2060 /* Update the new version's properties.
e03a95e7 2061 Make The new version visible only within this translation unit. Make sure
2062 that is not weak also.
a0c938f0 2063 ??? We cannot use COMDAT linkage because there is no
b5d36404 2064 ABI support for this. */
2065 DECL_EXTERNAL (new_version_node->decl) = 0;
ecd88073 2066 DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
b5d36404 2067 TREE_PUBLIC (new_version_node->decl) = 0;
2068 DECL_COMDAT (new_version_node->decl) = 0;
e03a95e7 2069 DECL_WEAK (new_version_node->decl) = 0;
f014e39d 2070 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2071 new_version_node->local.externally_visible = 0;
2072 new_version_node->local.local = 1;
2073 new_version_node->lowered = true;
f014e39d 2074
e03a95e7 2075 /* Update the call_expr on the edges to call the new version node. */
2076 update_call_expr (new_version_node);
48e1416a 2077
50828ed8 2078 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2079 return new_version_node;
2080}
469679ab 2081
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  NODE must be the master node for its
   decl (asserted below).  Returns the clone that received the saved body.
   NOTE(review): assumes NODE->clones is non-NULL on entry — callers are
   expected to invoke this only when inline clones exist; confirm at call
   sites.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* FIRST_CLONE will become the offline copy: it gets a fresh decl and
     takes over NODE's place at the head of the clone tree.  */
  first_clone = node->clones;

  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Reparent all remaining siblings of FIRST_CLONE so they become
	 clones of FIRST_CLONE itself.  The loop handles all but the
	 last sibling; the statement after it handles the last one, and
	 leaves N pointing at it for the list splice below.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      /* Splice the reparented sibling list onto the front of
	 FIRST_CLONE's own clone list, then detach FIRST_CLONE from
	 the sibling chain entirely.  */
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  /* FIRST_CLONE is now a root of its own clone tree, and NODE keeps no
     clones.  */
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Pre-order walk of FIRST_CLONE's clone tree (descend into clones,
     then advance to siblings, backing up through clone_of links),
     retargeting every clone that still pointed at NODE's old decl to
     the new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  /* The saved body is a private, non-exported copy.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  /* Pending IPA transforms were meant for the original body; drop them
     for the saved copy.  */
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
a861fe52 2148
ccf4ab6b 2149/* Given virtual clone, turn it into actual clone. */
2150static void
2151cgraph_materialize_clone (struct cgraph_node *node)
2152{
2153 bitmap_obstack_initialize (NULL);
2154 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2155 tree_function_versioning (node->clone_of->decl, node->decl,
2156 node->clone.tree_map, true,
2157 node->clone.args_to_skip);
e20422ea 2158 if (cgraph_dump_file)
2159 {
2160 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2161 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2162 }
ccf4ab6b 2163
2164 /* Function is no longer clone. */
2165 if (node->next_sibling_clone)
2166 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2167 if (node->prev_sibling_clone)
2168 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2169 else
2170 node->clone_of->clones = node->next_sibling_clone;
2171 node->next_sibling_clone = NULL;
2172 node->prev_sibling_clone = NULL;
6d1cc52c 2173 if (!node->clone_of->analyzed && !node->clone_of->clones)
2174 cgraph_remove_node (node->clone_of);
ccf4ab6b 2175 node->clone_of = NULL;
2176 bitmap_obstack_release (NULL);
2177}
2178
2179/* Once all functions from compilation unit are in memory, produce all clones
2180 and update all calls.
2181 We might also do this on demand if we don't want to bring all functions to
2182 memory prior compilation, but current WHOPR implementation does that and it is
2183 is bit easier to keep everything right in this order. */
2184void
2185cgraph_materialize_all_clones (void)
2186{
2187 struct cgraph_node *node;
2188 bool stabilized = false;
2189
2190 if (cgraph_dump_file)
2191 fprintf (cgraph_dump_file, "Materializing clones\n");
2192#ifdef ENABLE_CHECKING
2193 verify_cgraph ();
2194#endif
2195
2196 /* We can also do topological order, but number of iterations should be
2197 bounded by number of IPA passes since single IPA pass is probably not
2198 going to create clones of clones it created itself. */
2199 while (!stabilized)
2200 {
2201 stabilized = true;
2202 for (node = cgraph_nodes; node; node = node->next)
2203 {
2204 if (node->clone_of && node->decl != node->clone_of->decl
2205 && !gimple_has_body_p (node->decl))
2206 {
2207 if (gimple_has_body_p (node->clone_of->decl))
2208 {
2209 if (cgraph_dump_file)
e20422ea 2210 {
2211 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2212 cgraph_node_name (node->clone_of),
2213 cgraph_node_name (node));
2214 if (node->clone.tree_map)
2215 {
2216 unsigned int i;
2217 fprintf (cgraph_dump_file, " replace map: ");
2218 for (i = 0; i < VEC_length (ipa_replace_map_p,
2219 node->clone.tree_map);
2220 i++)
2221 {
2222 struct ipa_replace_map *replace_info;
2223 replace_info = VEC_index (ipa_replace_map_p,
2224 node->clone.tree_map,
2225 i);
2226 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2227 fprintf (cgraph_dump_file, " -> ");
2228 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2229 fprintf (cgraph_dump_file, "%s%s;",
2230 replace_info->replace_p ? "(replace)":"",
2231 replace_info->ref_p ? "(ref)":"");
2232 }
2233 fprintf (cgraph_dump_file, "\n");
2234 }
2235 if (node->clone.args_to_skip)
2236 {
2237 fprintf (cgraph_dump_file, " args_to_skip: ");
2238 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2239 }
2240 if (node->clone.args_to_skip)
2241 {
2242 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2243 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2244 }
2245 }
ccf4ab6b 2246 cgraph_materialize_clone (node);
2247 }
2248 else
2249 stabilized = false;
2250 }
2251 }
2252 }
ee3f5fc0 2253 for (node = cgraph_nodes; node; node = node->next)
2254 if (!node->analyzed && node->callees)
2255 cgraph_node_remove_callees (node);
ccf4ab6b 2256 if (cgraph_dump_file)
2257 fprintf (cgraph_dump_file, "Updating call sites\n");
2258 for (node = cgraph_nodes; node; node = node->next)
2259 if (node->analyzed && gimple_has_body_p (node->decl)
2260 && (!node->clone_of || node->clone_of->decl != node->decl))
2261 {
2262 struct cgraph_edge *e;
2263
2264 current_function_decl = node->decl;
2265 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2266 for (e = node->callees; e; e = e->next_callee)
2267 {
2268 tree decl = gimple_call_fndecl (e->call_stmt);
947781ac 2269 /* When function gets inlined, indirect inlining might've invented
2270 new edge for orginally indirect stmt. Since we are not
2271 preserving clones in the original form, we must not update here
2272 since other inline clones don't need to contain call to the same
2273 call. Inliner will do the substitution for us later. */
2274 if (decl && decl != e->callee->decl)
ccf4ab6b 2275 {
2276 gimple new_stmt;
2277 gimple_stmt_iterator gsi;
ed772161 2278
2279 if (e->callee->same_body)
2280 {
2281 struct cgraph_node *alias;
2282
2283 for (alias = e->callee->same_body;
2284 alias;
2285 alias = alias->next)
2286 if (decl == alias->decl)
2287 break;
2288 /* Don't update call from same body alias to the real
2289 function. */
2290 if (alias)
2291 continue;
2292 }
2293
ccf4ab6b 2294 if (cgraph_dump_file)
2295 {
2296 fprintf (cgraph_dump_file, "updating call of %s in %s:",
2297 cgraph_node_name (node),
2298 cgraph_node_name (e->callee));
2299 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2300 }
2301
e20422ea 2302 if (e->callee->clone.combined_args_to_skip)
ccf4ab6b 2303 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
e20422ea 2304 e->callee->clone.combined_args_to_skip);
ccf4ab6b 2305 else
2306 new_stmt = e->call_stmt;
2307 if (gimple_vdef (new_stmt)
2308 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2309 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2310 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2311
2312 gsi = gsi_for_stmt (e->call_stmt);
2313 gsi_replace (&gsi, new_stmt, true);
2314
2315 /* Update EH information too, just in case. */
e38def9c 2316 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
ccf4ab6b 2317
2318 cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);
2319
2320 if (cgraph_dump_file)
2321 {
2322 fprintf (cgraph_dump_file, " updated to:");
2323 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2324 }
2325 }
2326 }
2327 pop_cfun ();
2328 current_function_decl = NULL;
2329#ifdef ENABLE_CHECKING
2330 verify_cgraph_node (node);
2331#endif
2332 }
947781ac 2333#ifdef ENABLE_CHECKING
2334 verify_cgraph ();
2335#endif
ccf4ab6b 2336 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2337}
2338
a861fe52 2339#include "gt-cgraphunit.h"