/* Callgraph handling code.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains basic routines manipulating the call graph.

The callgraph:

    The call-graph is a data structure designed for inter-procedural
    optimization, but it is also used in non-unit-at-a-time compilation to
    allow easier code sharing.

    The call-graph consists of nodes and edges represented via linked lists.
    Each function (external or not) corresponds to a unique node.

    The mapping from declarations to call-graph nodes is done using a hash
    table based on DECL_UID.  The call-graph nodes are created lazily by the
    cgraph_node function when it is called for an unknown declaration.

    The callgraph at the moment does not represent indirect calls or calls
    from other compilation units.  The flag NEEDED is set for each node that
    may be accessed in such an invisible way, and such a node shall be
    considered an entry point to the callgraph.

    Interprocedural information:

      The callgraph is the place to store data needed for interprocedural
      optimization.  All data structures are divided into three components:
      local_info, which is produced while analyzing the function; global_info,
      which is the result of walking the whole callgraph at the end of
      compilation; and rtl_info, used by the RTL backend to propagate data
      from already compiled functions to their callers.

    Inlining plans:

      The function inlining information is decided in advance and maintained
      in the callgraph as a so-called inline plan.
      For each inlined call, the callee's node is cloned to represent the
      new function copy produced by the inliner.
      Each inlined call gets a unique corresponding clone node of the callee,
      and the data structure is updated while inlining is performed, so
      the clones are eliminated and their callee edges redirected to the
      caller.

      Each edge has an "inline_failed" field.  When the field is NULL,
      the call will be inlined.  When it is non-NULL, it contains a reason
      why inlining wasn't performed.  */

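/* As a small illustration of the lists described above, every call site
   known to the callgraph can be visited with two nested walks.  This is a
   sketch only, using the node/edge fields and the cgraph_node_name helper
   defined below:

     struct cgraph_node *node;
     struct cgraph_edge *edge;

     for (node = cgraph_nodes; node; node = node->next)
       for (edge = node->callees; edge; edge = edge->next_callee)
         fprintf (stderr, "%s -> %s\n",
                  cgraph_node_name (edge->caller),
                  cgraph_node_name (edge->callee));  */
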
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "hashtab.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "basic-block.h"
#include "cgraph.h"
#include "varray.h"
#include "output.h"
#include "intl.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-flow.h"

static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);

/* Hash table used to convert declarations into nodes.  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;

/* The linked list of cgraph nodes.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
struct cgraph_node *cgraph_new_nodes;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  */
int cgraph_max_uid;

/* Maximal pid used for profiling.  */
int cgraph_max_pid;

/* Set when the whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* What state the callgraph is in right now.  */
enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;

/* Set when the cgraph is fully built and the basic flags are computed.  */
bool cgraph_function_flags_ready = false;

/* Linked list of cgraph asm nodes.  */
struct cgraph_asm_node *cgraph_asm_nodes;

/* Last node in cgraph_asm_nodes.  */
static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;

/* The order index of the next cgraph node to be created.  This is
   used so that we can sort the cgraph nodes in order by when we saw
   them, to support -fno-toplevel-reorder.  */
int cgraph_order;

/* Returns a hash code for P.  */

static hashval_t
hash_node (const void *p)
{
  const struct cgraph_node *n = (const struct cgraph_node *) p;
  return (hashval_t) DECL_UID (n->decl);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_node (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
  const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}

/* Allocate new callgraph node and insert it into basic data structures.  */

static struct cgraph_node *
cgraph_create_node (void)
{
  struct cgraph_node *node;

  node = GGC_CNEW (struct cgraph_node);
  node->next = cgraph_nodes;
  node->uid = cgraph_max_uid++;
  node->pid = -1;
  node->order = cgraph_order++;
  if (cgraph_nodes)
    cgraph_nodes->previous = node;
  node->previous = NULL;
  node->global.estimated_growth = INT_MIN;
  cgraph_nodes = node;
  cgraph_n_nodes++;
  return node;
}

/* Return cgraph node assigned to DECL.  Create new one when needed.  */

struct cgraph_node *
cgraph_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  if (*slot)
    {
      node = *slot;
      if (!node->master_clone)
        node->master_clone = node;
      return node;
    }

  node = cgraph_create_node ();
  node->decl = decl;
  *slot = node;
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
      node->master_clone = node;
    }
  return node;
}

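/* Because the lookup above is keyed on DECL_UID, repeated calls with the
   same declaration yield the same node; a minimal sketch:

     struct cgraph_node *n1 = cgraph_node (decl);
     struct cgraph_node *n2 = cgraph_node (decl);
     gcc_assert (n1 == n2);  */
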
/* Insert an already constructed node into the hashtable.  */

void
cgraph_insert_node_to_hashtable (struct cgraph_node *node)
{
  struct cgraph_node **slot;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);

  gcc_assert (!*slot);
  *slot = node;
}

/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
   Return NULL if there's no such node.  */

struct cgraph_node *
cgraph_node_for_asm (tree asmname)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    if (decl_assembler_name_equal (node->decl, asmname))
      return node;

  return NULL;
}

/* Returns a hash value for X (which really is a cgraph_edge).  */

static hashval_t
edge_hash (const void *x)
{
  return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
}

/* Return nonzero if the call_stmt of cgraph_edge X is the statement Y.  */

static int
edge_eq (const void *x, const void *y)
{
  return ((const struct cgraph_edge *) x)->call_stmt == y;
}

/* Return the callgraph edge of NODE representing the call statement
   CALL_STMT.  */

struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, tree call_stmt)
{
  struct cgraph_edge *e, *e2;
  int n = 0;

  if (node->call_site_hash)
    return (struct cgraph_edge *)
      htab_find_with_hash (node->call_site_hash, call_stmt,
                           htab_hash_pointer (call_stmt));

  /* This loop may turn out to be a performance problem.  In such a case,
     adding hashtables into call nodes with very many edges is probably the
     best solution.  It is not a good idea to add a pointer into the
     CALL_EXPR itself, because we want to make it possible to have multiple
     cgraph nodes representing different clones of the same body before
     the body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->call_stmt == call_stmt)
        break;
      n++;
    }
  if (n > 100)
    {
      node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
      for (e2 = node->callees; e2; e2 = e2->next_callee)
        {
          void **slot;
          slot = htab_find_slot_with_hash (node->call_site_hash,
                                           e2->call_stmt,
                                           htab_hash_pointer (e2->call_stmt),
                                           INSERT);
          gcc_assert (!*slot);
          *slot = e2;
        }
    }
  return e;
}

/* Change the call_stmt of edge E to NEW_STMT.  */

void
cgraph_set_call_stmt (struct cgraph_edge *e, tree new_stmt)
{
  if (e->caller->call_site_hash)
    {
      htab_remove_elt_with_hash (e->caller->call_site_hash,
                                 e->call_stmt,
                                 htab_hash_pointer (e->call_stmt));
    }
  e->call_stmt = new_stmt;
  if (e->caller->call_site_hash)
    {
      void **slot;
      slot = htab_find_slot_with_hash (e->caller->call_site_hash,
                                       e->call_stmt,
                                       htab_hash_pointer (e->call_stmt),
                                       INSERT);
      gcc_assert (!*slot);
      *slot = e;
    }
}

/* Create edge from CALLER to CALLEE in the cgraph.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
                    tree call_stmt, gcov_type count, int freq, int nest)
{
  struct cgraph_edge *edge = GGC_NEW (struct cgraph_edge);
#ifdef ENABLE_CHECKING
  struct cgraph_edge *e;

  for (e = caller->callees; e; e = e->next_callee)
    gcc_assert (e->call_stmt != call_stmt);
#endif

  gcc_assert (get_call_expr_in (call_stmt));

  if (!DECL_SAVED_TREE (callee->decl))
    edge->inline_failed = N_("function body not available");
  else if (callee->local.redefined_extern_inline)
    edge->inline_failed = N_("redefined extern inline functions are not "
                             "considered for inlining");
  else if (callee->local.inlinable)
    edge->inline_failed = N_("function not considered for inlining");
  else
    edge->inline_failed = N_("function not inlinable");

  edge->aux = NULL;

  edge->caller = caller;
  edge->callee = callee;
  edge->call_stmt = call_stmt;
  edge->prev_caller = NULL;
  edge->next_caller = callee->callers;
  if (callee->callers)
    callee->callers->prev_caller = edge;
  edge->prev_callee = NULL;
  edge->next_callee = caller->callees;
  if (caller->callees)
    caller->callees->prev_callee = edge;
  caller->callees = edge;
  callee->callers = edge;
  edge->count = count;
  gcc_assert (count >= 0);
  edge->frequency = freq;
  gcc_assert (freq >= 0);
  gcc_assert (freq <= CGRAPH_FREQ_MAX);
  edge->loop_nest = nest;
  if (caller->call_site_hash)
    {
      void **slot;
      slot = htab_find_slot_with_hash (caller->call_site_hash,
                                       edge->call_stmt,
                                       htab_hash_pointer (edge->call_stmt),
                                       INSERT);
      gcc_assert (!*slot);
      *slot = edge;
    }
  return edge;
}

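/* A callgraph builder that has the caller declaration, the callee
   declaration and the call statement in hand would record the call roughly
   as below.  This is only a sketch; caller_decl, callee_decl, stmt, count,
   freq and nest stand for whatever values the builder computed, they are
   not names used elsewhere in this file:

     cgraph_create_edge (cgraph_node (caller_decl),
                         cgraph_node (callee_decl),
                         stmt, count, freq, nest);  */
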
/* Remove the edge E from the list of the callers of the callee.  */

static inline void
cgraph_edge_remove_callee (struct cgraph_edge *e)
{
  if (e->prev_caller)
    e->prev_caller->next_caller = e->next_caller;
  if (e->next_caller)
    e->next_caller->prev_caller = e->prev_caller;
  if (!e->prev_caller)
    e->callee->callers = e->next_caller;
}

/* Remove the edge E from the list of the callees of the caller.  */

static inline void
cgraph_edge_remove_caller (struct cgraph_edge *e)
{
  if (e->prev_callee)
    e->prev_callee->next_callee = e->next_callee;
  if (e->next_callee)
    e->next_callee->prev_callee = e->prev_callee;
  if (!e->prev_callee)
    e->caller->callees = e->next_callee;
  if (e->caller->call_site_hash)
    htab_remove_elt_with_hash (e->caller->call_site_hash,
                               e->call_stmt,
                               htab_hash_pointer (e->call_stmt));
}

/* Remove the edge E from the cgraph.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  /* Remove from callers list of the callee.  */
  cgraph_edge_remove_callee (e);

  /* Remove from callees list of the callers.  */
  cgraph_edge_remove_caller (e);
}

/* Redirect callee of E to N.  The function does not update underlying
   call expression.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Remove from callers list of the current callee.  */
  cgraph_edge_remove_callee (e);

  /* Insert to callers list of the new callee.  */
  e->prev_caller = NULL;
  if (n->callers)
    n->callers->prev_caller = e;
  e->next_caller = n->callers;
  n->callers = e;
  e->callee = n;
}

/* Update or remove the corresponding cgraph edge if a call OLD_CALL
   in OLD_STMT changed into NEW_STMT.  */

void
cgraph_update_edges_for_call_stmt (tree old_stmt, tree old_call,
                                   tree new_stmt)
{
  tree new_call = get_call_expr_in (new_stmt);
  struct cgraph_node *node = cgraph_node (cfun->decl);

  if (old_call != new_call)
    {
      struct cgraph_edge *e = cgraph_edge (node, old_stmt);
      struct cgraph_edge *ne = NULL;
      tree new_decl;

      if (e)
        {
          gcov_type count = e->count;
          int frequency = e->frequency;
          int loop_nest = e->loop_nest;

          cgraph_remove_edge (e);
          if (new_call)
            {
              new_decl = get_callee_fndecl (new_call);
              if (new_decl)
                {
                  ne = cgraph_create_edge (node, cgraph_node (new_decl),
                                           new_stmt, count, frequency,
                                           loop_nest);
                  gcc_assert (ne->inline_failed);
                }
            }
        }
    }
  else if (old_stmt != new_stmt)
    {
      struct cgraph_edge *e = cgraph_edge (node, old_stmt);

      if (e)
        cgraph_set_call_stmt (e, new_stmt);
    }
}

/* Remove all callees from the node.  */

void
cgraph_node_remove_callees (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  /* It is sufficient to remove the edges from the lists of callers of
     the callees.  The callee list of the node can be zapped with one
     assignment.  */
  for (e = node->callees; e; e = e->next_callee)
    cgraph_edge_remove_callee (e);
  node->callees = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
}

/* Remove all callers from the node.  */

static void
cgraph_node_remove_callers (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  /* It is sufficient to remove the edges from the lists of callees of
     the callers.  The caller list of the node can be zapped with one
     assignment.  */
  for (e = node->callers; e; e = e->next_caller)
    cgraph_edge_remove_caller (e);
  node->callers = NULL;
}

/* Release memory used to represent body of function NODE.  */

void
cgraph_release_function_body (struct cgraph_node *node)
{
  if (DECL_STRUCT_FUNCTION (node->decl)
      && DECL_STRUCT_FUNCTION (node->decl)->gimple_df)
    {
      tree old_decl = current_function_decl;
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      current_function_decl = node->decl;
      delete_tree_ssa ();
      delete_tree_cfg_annotations ();
      cfun->eh = NULL;
      current_function_decl = old_decl;
      pop_cfun ();
    }
  DECL_SAVED_TREE (node->decl) = NULL;
  DECL_STRUCT_FUNCTION (node->decl) = NULL;
  DECL_INITIAL (node->decl) = error_mark_node;
}

/* Remove the node from the cgraph.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;

  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  /* Incremental inlining accesses removed nodes stored in the postorder
     list.  */
  node->needed = node->reachable = false;
  while (node->nested)
    cgraph_remove_node (node->nested);
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
        node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      if (node->next_clone)
        {
          struct cgraph_node *new_node = node->next_clone;
          struct cgraph_node *n;

          /* Make the next clone be the master clone.  */
          for (n = new_node; n; n = n->next_clone)
            n->master_clone = new_node;

          *slot = new_node;
          node->next_clone->prev_clone = NULL;
        }
      else
        {
          htab_clear_slot (cgraph_hash, slot);
          kill_body = true;
        }
    }
  else
    {
      node->prev_clone->next_clone = node->next_clone;
      if (node->next_clone)
        node->next_clone->prev_clone = node->prev_clone;
    }

  /* While all the clones are removed after being processed, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the
     case.  */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = (struct cgraph_node *) *slot;
      if (!n->next_clone && !n->global.inlined_to
          && (cgraph_global_info_ready
              && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
        kill_body = true;
    }

  if (kill_body && flag_unit_at_a_time)
    cgraph_release_function_body (node);
  node->decl = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
  cgraph_n_nodes--;
  /* Do not free the structure itself so the walk over the chain can
     continue.  */
}

/* Notify finalize_compilation_unit that given node is reachable.  */

void
cgraph_mark_reachable_node (struct cgraph_node *node)
{
  if (!node->reachable && node->local.finalized)
    {
      notice_global_symbol (node->decl);
      node->reachable = 1;
      gcc_assert (!cgraph_global_info_ready);

      node->next_needed = cgraph_nodes_queue;
      cgraph_nodes_queue = node;
    }
}

/* Likewise indicate that a node is needed, i.e. reachable via some
   external means.  */

void
cgraph_mark_needed_node (struct cgraph_node *node)
{
  node->needed = 1;
  cgraph_mark_reachable_node (node);
}

/* Return local info for the compiled function.  */

struct cgraph_local_info *
cgraph_local_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_node (decl);
  return &node->local;
}

/* Return global info for the compiled function.  */

struct cgraph_global_info *
cgraph_global_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
  node = cgraph_node (decl);
  return &node->global;
}

/* Return RTL info for the compiled function.  */

struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_node (decl);
  if (decl != current_function_decl
      && !TREE_ASM_WRITTEN (node->decl))
    return NULL;
  return &node->rtl;
}

/* Return the name of the node used in debug output.  */
const char *
cgraph_node_name (struct cgraph_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}

/* Names used to print out the availability enum.  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};


/* Dump call graph node NODE to file F.  */

void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  fprintf (f, "%s/%i(%i):", cgraph_node_name (node), node->uid, node->pid);
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
             cgraph_node_name (node->global.inlined_to),
             node->global.inlined_to->uid);
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
             cgraph_availability_names [cgraph_function_body_availability (node)]);
  if (node->master_clone && node->master_clone->uid != node->uid)
    fprintf (f, "(%i)", node->master_clone->uid);
  if (node->count)
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
             (HOST_WIDEST_INT)node->count);
  if (node->local.inline_summary.self_insns)
    fprintf (f, " %i insns", node->local.inline_summary.self_insns);
  if (node->global.insns && node->global.insns
      != node->local.inline_summary.self_insns)
    fprintf (f, " (%i after inlining)", node->global.insns);
  if (node->local.inline_summary.estimated_self_stack_size)
    fprintf (f, " %i bytes stack usage",
             (int)node->local.inline_summary.estimated_self_stack_size);
  if (node->global.estimated_stack_size
      != node->local.inline_summary.estimated_self_stack_size)
    fprintf (f, " %i bytes after inlining",
             (int)node->global.estimated_stack_size);
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  else if (node->reachable)
    fprintf (f, " reachable");
  if (DECL_SAVED_TREE (node->decl))
    fprintf (f, " tree");
  if (node->output)
    fprintf (f, " output");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");

  fprintf (f, "\n  called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
               edge->caller->uid);
      if (edge->count)
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
                 (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
        fprintf (f, "(%.2f per call) ",
                 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (!edge->inline_failed)
        fprintf (f, "(inlined) ");
    }

  fprintf (f, "\n  calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
               edge->callee->uid);
      if (!edge->inline_failed)
        fprintf (f, "(inlined) ");
      if (edge->count)
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
                 (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
        fprintf (f, "(%.2f per call) ",
                 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (edge->loop_nest)
        fprintf (f, "(nested in %i loops) ", edge->loop_nest);
    }
  fprintf (f, "\n");
}


/* Dump call graph node NODE to stderr.  */

void
debug_cgraph_node (struct cgraph_node *node)
{
  dump_cgraph_node (stderr, node);
}


/* Dump the callgraph to file F.  */

void
dump_cgraph (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "callgraph:\n\n");
  for (node = cgraph_nodes; node; node = node->next)
    dump_cgraph_node (f, node);
}


/* Dump the call graph to stderr.  */

void
debug_cgraph (void)
{
  dump_cgraph (stderr);
}

/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */

void
change_decl_assembler_name (tree decl, tree name)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      SET_DECL_ASSEMBLER_NAME (decl, name);
      return;
    }
  if (name == DECL_ASSEMBLER_NAME (decl))
    return;

  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
      && DECL_RTL_SET_P (decl))
    warning (0, "%D renamed after being referenced in assembly", decl);

  SET_DECL_ASSEMBLER_NAME (decl, name);
}

/* Add a top-level asm statement to the list.  */

struct cgraph_asm_node *
cgraph_add_asm_node (tree asm_str)
{
  struct cgraph_asm_node *node;

  node = GGC_CNEW (struct cgraph_asm_node);
  node->asm_str = asm_str;
  node->order = cgraph_order++;
  node->next = NULL;
  if (cgraph_asm_nodes == NULL)
    cgraph_asm_nodes = node;
  else
    cgraph_asm_last_node->next = node;
  cgraph_asm_last_node = node;
  return node;
}

/* Return true when the DECL can possibly be inlined.  */

bool
cgraph_function_possibly_inlined_p (tree decl)
{
  if (!cgraph_global_info_ready)
    return (DECL_INLINE (decl) && !flag_really_no_inline);
  return DECL_POSSIBLY_INLINED (decl);
}

/* Create a clone of edge E in the node N; the new edge is represented by
   the call statement CALL_STMT.  */

struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
                   tree call_stmt, gcov_type count_scale, int freq_scale,
                   int loop_nest, bool update_original)
{
  struct cgraph_edge *new;
  gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
  gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;

  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;
  new = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
                            e->loop_nest + loop_nest);

  new->inline_failed = e->inline_failed;
  if (update_original)
    {
      e->count -= new->count;
      if (e->count < 0)
        e->count = 0;
    }
  return new;
}

/* Create a node representing a clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the new node.  */

struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
                   int loop_nest, bool update_original)
{
  struct cgraph_node *new = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;

  new->decl = n->decl;
  new->origin = n->origin;
  if (new->origin)
    {
      new->next_nested = new->origin->nested;
      new->origin->nested = new;
    }
  new->analyzed = n->analyzed;
  new->local = n->local;
  new->global = n->global;
  new->rtl = n->rtl;
  new->master_clone = n->master_clone;
  new->count = count;
  if (n->count)
    count_scale = new->count * REG_BR_PROB_BASE / n->count;
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
        n->count = 0;
    }

  for (e = n->callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new, e->call_stmt, count_scale, freq, loop_nest,
                       update_original);

  new->next_clone = n->next_clone;
  new->prev_clone = n;
  n->next_clone = new;
  if (new->next_clone)
    new->next_clone->prev_clone = new;

  return new;
}

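/* This is roughly how the inliner materializes the inline plan for a call
   edge E it has decided to inline (a sketch of logic that lives in
   ipa-inline.c, not part of this file):

     struct cgraph_node *clone
       = cgraph_clone_node (e->callee, e->count, e->frequency,
                            e->loop_nest, true);
     cgraph_redirect_edge_callee (e, clone);  */
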
/* Return true if N is a master clone (see cgraph_master_clone).  */

bool
cgraph_is_master_clone (struct cgraph_node *n)
{
  return (n == cgraph_master_clone (n));
}

/* Return the master clone of N, or NULL when N's body is not available
   or may be overwritten.  */

struct cgraph_node *
cgraph_master_clone (struct cgraph_node *n)
{
  enum availability avail = cgraph_function_body_availability (n);

  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
    return NULL;

  if (!n->master_clone)
    n->master_clone = cgraph_node (n->decl);

  return n->master_clone;
}

/* NODE is no longer a nested function; update the cgraph accordingly.  */

void
cgraph_unnest_node (struct cgraph_node *node)
{
  struct cgraph_node **node2 = &node->origin->nested;
  gcc_assert (node->origin);

  while (*node2 != node)
    node2 = &(*node2)->next_nested;
  *node2 = node->next_nested;
  node->origin = NULL;
}

/* Return function availability.  See cgraph.h for a description of the
   individual return values.  */

enum availability
cgraph_function_body_availability (struct cgraph_node *node)
{
  enum availability avail;
  gcc_assert (cgraph_function_flags_ready);
  if (!node->analyzed)
    avail = AVAIL_NOT_AVAILABLE;
  else if (node->local.local)
    avail = AVAIL_LOCAL;
  else if (node->local.externally_visible)
    avail = AVAIL_AVAILABLE;

  /* If the function can be overwritten, return OVERWRITABLE.  Take care
     of at least two notable extensions - the COMDAT functions used to
     share template instantiations in C++ (this is symmetric to code in
     cp_cannot_inline_tree_fn and probably shall be shared, with the
     inlinability hooks completely eliminated).

     ??? Does the C++ one definition rule allow us to always return
     AVAIL_AVAILABLE here?  That would be a good reason to preserve this
     hook.  Similarly, deal with extern inline functions - this is again
     necessary to get C++ shared functions having keyed templates right,
     and in the C extension documentation we probably should document the
     requirement that both versions of the function (extern inline and
     offline) have the same side effect characteristics, as good
     optimization is what this optimization is about.  */

  else if (!(*targetm.binds_local_p) (node->decl)
           && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
    avail = AVAIL_OVERWRITABLE;
  else avail = AVAIL_AVAILABLE;

  return avail;
}

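/* An IPA pass that wants to inspect or transform a function body would
   typically guard itself on the returned value; a sketch, where
   analyze_body stands for the pass's own routine:

     if (cgraph_function_body_availability (node) >= AVAIL_AVAILABLE)
       analyze_body (node);  */
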
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by the middle end and allows insertion of new functions at an arbitrary
   point of compilation.  The function can be in high, low or SSA GIMPLE
   form.

   The function is assumed to be reachable and to have its address taken
   (so no API-breaking optimizations are performed on it).

   The main work done by this function is to enqueue the function for later
   processing, to avoid the need for the passes to be re-entrant.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  struct cgraph_node *node;
  switch (cgraph_state)
    {
    case CGRAPH_STATE_CONSTRUCTION:
      /* Just enqueue the function to be processed at the nearest
         occurrence.  */
      node = cgraph_node (fndecl);
      node->next_needed = cgraph_new_nodes;
      if (lowered)
        node->lowered = true;
      cgraph_new_nodes = node;
      break;

    case CGRAPH_STATE_IPA:
    case CGRAPH_STATE_IPA_SSA:
    case CGRAPH_STATE_EXPANSION:
      /* Bring the function into finalized state and enqueue it for later
         analysis and compilation.  */
      node = cgraph_node (fndecl);
      node->local.local = false;
      node->local.finalized = true;
      node->reachable = node->needed = true;
      if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
        {
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          tree_register_cfg_hooks ();
          tree_lowering_passes (fndecl);
          bitmap_obstack_initialize (NULL);
          if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
            execute_pass_list (pass_early_local_passes.pass.sub);
          bitmap_obstack_release (NULL);
          pop_cfun ();
          current_function_decl = NULL;

          lowered = true;
        }
      if (lowered)
        node->lowered = true;
      node->next_needed = cgraph_new_nodes;
      cgraph_new_nodes = node;
      break;

    case CGRAPH_STATE_FINISHED:
      /* At the very end of compilation we have to do all the work up
         to expansion.  */
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      current_function_decl = fndecl;
      tree_register_cfg_hooks ();
      if (!lowered)
        tree_lowering_passes (fndecl);
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)) && optimize)
        execute_pass_list (pass_early_local_passes.pass.sub);
      bitmap_obstack_release (NULL);
      tree_rest_of_compilation (fndecl);
      pop_cfun ();
      current_function_decl = NULL;
      break;
    }
}

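/* Middle-end clients use cgraph_add_new_function to register functions
   they synthesize late; for instance, the OpenMP expansion code hands each
   outlined child function to the callgraph roughly like this (a sketch of
   a call site that lives in omp-low.c, not here):

     cgraph_add_new_function (child_fn, true);  */
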
#include "gt-cgraph.h"