/* Callgraph handling code.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file contains basic routines manipulating call graph and variable pool

The callgraph:

    The call-graph is a data structure designed for intra-procedural optimization
    but it is also used in non-unit-at-a-time compilation to allow easier code
    sharing.

    The call-graph consists of nodes and edges represented via linked lists.
    Each function (external or not) corresponds to a unique node (in
    contrast to tree DECL nodes where we can have multiple nodes for each
    function).

    The mapping from declarations to call-graph nodes is done using a hash table
    based on DECL_ASSEMBLER_NAME, so it is essential for the assembler name
    not to change once the declaration is inserted into the call-graph.
    The call-graph nodes are created lazily by the cgraph_node function when
    it is called for an unknown declaration.

    When built, there is one edge for each direct call.  It is possible that
    the reference will be later optimized out.  The call-graph is built
    conservatively in order to make conservative data flow analysis possible.

    The callgraph at the moment does not represent indirect calls or calls
    from other compilation units.  Flag NEEDED is set for each node that may
    be accessed in such an invisible way and it shall be considered an
    entry point to the callgraph.

    Intraprocedural information:

      The callgraph is the place to store data needed for intraprocedural
      optimization.  All data structures are divided into three components:
      local_info that is produced while analyzing the function, global_info
      that is the result of walking the whole callgraph at the end of
      compilation, and rtl_info used by the RTL backend to propagate data
      from already compiled functions to their callers.

    Inlining plans:

      The function inlining information is decided in advance and maintained
      in the callgraph as a so-called inline plan.
      For each inlined call, the callee's node is cloned to represent the
      new function copy produced by the inliner.
      Each inlined call gets a unique corresponding clone node of the callee
      and the data structure is updated while inlining is performed, so
      the clones are eliminated and their callee edges redirected to the
      caller.

      Each edge has an "inline_failed" field.  When the field is set to NULL,
      the call will be inlined.  When it is non-NULL it contains a reason
      why inlining wasn't performed.


The varpool data structure:

    Varpool is used to maintain variables in a similar manner as the
    call-graph is used for functions.  Most of the API is symmetric,
    replacing the cgraph function prefix with cgraph_varpool.  */

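/* As a quick illustration of how this API fits together (a sketch only;
   the decl and statement variables below are hypothetical and would come
   from a front end or pass), building and inspecting the graph for a
   single direct call looks roughly like:

       struct cgraph_node *caller = cgraph_node (caller_fndecl);
       struct cgraph_node *callee = cgraph_node (callee_fndecl);

       cgraph_create_edge (caller, callee, call_stmt, 0, 0);
       cgraph_mark_needed_node (caller);

       dump_cgraph (stderr);

   cgraph_node lazily creates the nodes, cgraph_create_edge records the
   direct call (count 0, loop nest 0 here), cgraph_mark_needed_node flags
   the caller as an entry point, and dump_cgraph prints the result.  */
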
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "hashtab.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "basic-block.h"
#include "cgraph.h"
#include "varray.h"
#include "output.h"
#include "intl.h"
#include "tree-gimple.h"
#include "tree-dump.h"

static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);

/* Hash table used to convert declarations into nodes.  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;

/* The linked list of cgraph nodes.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  */
int cgraph_max_uid;

/* Set when the whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* Set when the cgraph is fully built and the basic flags are computed.  */
bool cgraph_function_flags_ready = false;

/* Hash table used to convert declarations into varpool nodes.  */
static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;

/* Queue of varpool nodes scheduled to be lowered and output.  */
struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node;

/* The linked list of cgraph varpool nodes.  */
static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;

/* End of the varpool queue.  Needs to be GTYed to work with PCH.  */
static GTY(()) struct cgraph_varpool_node *cgraph_varpool_last_needed_node;

static hashval_t hash_node (const void *);
static int eq_node (const void *, const void *);

/* Returns a hash code for P.  */

static hashval_t
hash_node (const void *p)
{
  const struct cgraph_node *n = p;
  return (hashval_t) DECL_UID (n->decl);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_node (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = p1, *n2 = p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}

/* Allocate new callgraph node and insert it into basic data structures.  */
static struct cgraph_node *
cgraph_create_node (void)
{
  struct cgraph_node *node;

  node = ggc_alloc_cleared (sizeof (*node));
  node->next = cgraph_nodes;
  node->uid = cgraph_max_uid++;
  if (cgraph_nodes)
    cgraph_nodes->previous = node;
  node->previous = NULL;
  node->global.estimated_growth = INT_MIN;
  cgraph_nodes = node;
  cgraph_n_nodes++;
  return node;
}

/* Return cgraph node assigned to DECL.  Create new one when needed.  */
struct cgraph_node *
cgraph_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  if (*slot)
    {
      node = *slot;
      if (!node->master_clone)
        node->master_clone = node;
      return node;
    }

  node = cgraph_create_node ();
  node->decl = decl;
  *slot = node;
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
      node->master_clone = node;
    }
  return node;
}

/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.  */

static bool
decl_assembler_name_equal (tree decl, tree asmname)
{
  tree decl_asmname = DECL_ASSEMBLER_NAME (decl);

  if (decl_asmname == asmname)
    return true;

  /* If the target assembler name was set by the user, things are trickier.
     We have a leading '*' to begin with.  After that, it's arguable what
     is the correct thing to do with -fleading-underscore.  Arguably, we've
     historically been doing the wrong thing in assemble_alias by always
     printing the leading underscore.  Since we're not changing that, make
     sure user_label_prefix follows the '*' before matching.  */
  if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
    {
      const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
      size_t ulp_len = strlen (user_label_prefix);

      if (ulp_len == 0)
        ;
      else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
        decl_str += ulp_len;
      else
        return false;

      return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
    }

  return false;
}

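/* A worked example of the matching above (illustrative, not from the
   original sources): with user_label_prefix == "_", a user-specified
   assembler name stored as "*_foo" matches an ASMNAME of "foo", while
   "*foo" matches nothing because the prefix check fails; with an empty
   user_label_prefix, "*foo" matches "foo" directly.  */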

/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
   Return NULL if there's no such node.  */

struct cgraph_node *
cgraph_node_for_asm (tree asmname)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node ; node = node->next)
    if (decl_assembler_name_equal (node->decl, asmname))
      return node;

  return NULL;
}

/* Return the callgraph edge representing the call statement CALL_STMT.  */
struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, tree call_stmt)
{
  struct cgraph_edge *e;

  /* This loop may turn out to be a performance problem.  In that case,
     adding hash tables into call nodes with very many edges is probably
     the best solution.  It is not a good idea to add a pointer into the
     CALL_EXPR itself because we want to make it possible to have multiple
     cgraph nodes representing different clones of the same body before
     the body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->call_stmt == call_stmt)
      break;
  return e;
}

/* Create edge from CALLER to CALLEE in the cgraph.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
                    tree call_stmt, gcov_type count, int nest)
{
  struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
#ifdef ENABLE_CHECKING
  struct cgraph_edge *e;

  for (e = caller->callees; e; e = e->next_callee)
    gcc_assert (e->call_stmt != call_stmt);
#endif

  gcc_assert (get_call_expr_in (call_stmt));

  if (!DECL_SAVED_TREE (callee->decl))
    edge->inline_failed = N_("function body not available");
  else if (callee->local.redefined_extern_inline)
    edge->inline_failed = N_("redefined extern inline functions are not "
                             "considered for inlining");
  else if (callee->local.inlinable)
    edge->inline_failed = N_("function not considered for inlining");
  else
    edge->inline_failed = N_("function not inlinable");

  edge->aux = NULL;

  edge->caller = caller;
  edge->callee = callee;
  edge->call_stmt = call_stmt;
  edge->prev_caller = NULL;
  edge->next_caller = callee->callers;
  if (callee->callers)
    callee->callers->prev_caller = edge;
  edge->prev_callee = NULL;
  edge->next_callee = caller->callees;
  if (caller->callees)
    caller->callees->prev_callee = edge;
  caller->callees = edge;
  callee->callers = edge;
  edge->count = count;
  edge->loop_nest = nest;
  return edge;
}

/* Remove the edge E from the list of the callers of the callee.  */

static inline void
cgraph_edge_remove_callee (struct cgraph_edge *e)
{
  if (e->prev_caller)
    e->prev_caller->next_caller = e->next_caller;
  if (e->next_caller)
    e->next_caller->prev_caller = e->prev_caller;
  if (!e->prev_caller)
    e->callee->callers = e->next_caller;
}

/* Remove the edge E from the list of the callees of the caller.  */

static inline void
cgraph_edge_remove_caller (struct cgraph_edge *e)
{
  if (e->prev_callee)
    e->prev_callee->next_callee = e->next_callee;
  if (e->next_callee)
    e->next_callee->prev_callee = e->prev_callee;
  if (!e->prev_callee)
    e->caller->callees = e->next_callee;
}

/* Remove the edge E in the cgraph.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  /* Remove from callers list of the callee.  */
  cgraph_edge_remove_callee (e);

  /* Remove from callees list of the callers.  */
  cgraph_edge_remove_caller (e);
}

/* Redirect callee of E to N.  The function does not update underlying
   call expression.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Remove from callers list of the current callee.  */
  cgraph_edge_remove_callee (e);

  /* Insert to callers list of the new callee.  */
  e->prev_caller = NULL;
  if (n->callers)
    n->callers->prev_caller = e;
  e->next_caller = n->callers;
  n->callers = e;
  e->callee = n;
}

/* Remove all callees from the node.  */

void
cgraph_node_remove_callees (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  /* It is sufficient to remove the edges from the lists of callers of
     the callees.  The callee list of the node can be zapped with one
     assignment.  */
  for (e = node->callees; e; e = e->next_callee)
    cgraph_edge_remove_callee (e);
  node->callees = NULL;
}

/* Remove all callers from the node.  */

static void
cgraph_node_remove_callers (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  /* It is sufficient to remove the edges from the lists of callees of
     the callers.  The caller list of the node can be zapped with one
     assignment.  */
  for (e = node->callers; e; e = e->next_caller)
    cgraph_edge_remove_caller (e);
  node->callers = NULL;
}

/* Remove the node from cgraph.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;

  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  while (node->nested)
    cgraph_remove_node (node->nested);
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
        node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      if (node->next_clone)
        {
          struct cgraph_node *new_node = node->next_clone;
          struct cgraph_node *n;

          /* Make the next clone be the master clone.  */
          for (n = new_node; n; n = n->next_clone)
            n->master_clone = new_node;

          *slot = new_node;
          node->next_clone->prev_clone = NULL;
        }
      else
        {
          htab_clear_slot (cgraph_hash, slot);
          kill_body = true;
        }
    }
  else
    {
      node->prev_clone->next_clone = node->next_clone;
      if (node->next_clone)
        node->next_clone->prev_clone = node->prev_clone;
    }

  /* While all the clones are removed after being processed, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the
     case.  */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = *slot;
      if (!n->next_clone && !n->global.inlined_to
          && (cgraph_global_info_ready
              && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
        kill_body = true;
    }

  if (kill_body && !dump_enabled_p (TDI_tree_all) && flag_unit_at_a_time)
    {
      DECL_SAVED_TREE (node->decl) = NULL;
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
      DECL_INITIAL (node->decl) = error_mark_node;
    }
  cgraph_n_nodes--;
  /* Do not free the structure itself so the walk over chain can continue.  */
}

/* Notify finalize_compilation_unit that given node is reachable.  */

void
cgraph_mark_reachable_node (struct cgraph_node *node)
{
  if (!node->reachable && node->local.finalized)
    {
      notice_global_symbol (node->decl);
      node->reachable = 1;
      gcc_assert (!cgraph_global_info_ready);

      node->next_needed = cgraph_nodes_queue;
      cgraph_nodes_queue = node;
    }
}

/* Likewise indicate that a node is needed, i.e. reachable via some
   external means.  */

void
cgraph_mark_needed_node (struct cgraph_node *node)
{
  node->needed = 1;
  cgraph_mark_reachable_node (node);
}

/* Return local info for the compiled function.  */

struct cgraph_local_info *
cgraph_local_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_node (decl);
  return &node->local;
}

/* Return global info for the compiled function.  */

struct cgraph_global_info *
cgraph_global_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
  node = cgraph_node (decl);
  return &node->global;
}

/* Return RTL info for the compiled function.  */

struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_node (decl);
  if (decl != current_function_decl
      && !TREE_ASM_WRITTEN (node->decl))
    return NULL;
  return &node->rtl;
}

/* Return name of the node used in debug output.  */
const char *
cgraph_node_name (struct cgraph_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}

/* Return name of the node used in debug output.  */
static const char *
cgraph_varpool_node_name (struct cgraph_varpool_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}

/* Names used to print out the availability enum.  */
static const char * const availability_names[] =
  {"unset", "not_available", "overwrittable", "available", "local"};

/* Dump given cgraph node.  */
void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  fprintf (f, "%s/%i:", cgraph_node_name (node), node->uid);
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
             cgraph_node_name (node->global.inlined_to),
             node->global.inlined_to->uid);
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
             availability_names [cgraph_function_body_availability (node)]);
  if (node->master_clone && node->master_clone->uid != node->uid)
    fprintf (f, "(%i)", node->master_clone->uid);
  if (node->count)
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
             (HOST_WIDEST_INT)node->count);
  if (node->local.self_insns)
    fprintf (f, " %i insns", node->local.self_insns);
  if (node->global.insns && node->global.insns != node->local.self_insns)
    fprintf (f, " (%i after inlining)", node->global.insns);
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  else if (node->reachable)
    fprintf (f, " reachable");
  if (DECL_SAVED_TREE (node->decl))
    fprintf (f, " tree");
  if (node->output)
    fprintf (f, " output");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");

  fprintf (f, "\n  called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
               edge->caller->uid);
      if (edge->count)
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
                 (HOST_WIDEST_INT)edge->count);
      if (!edge->inline_failed)
        fprintf (f, "(inlined) ");
    }

  fprintf (f, "\n  calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
               edge->callee->uid);
      if (!edge->inline_failed)
        fprintf (f, "(inlined) ");
      if (edge->count)
        fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
                 (HOST_WIDEST_INT)edge->count);
      if (edge->loop_nest)
        fprintf (f, "(nested in %i loops) ", edge->loop_nest);
    }
  fprintf (f, "\n");
}

/* Dump the callgraph.  */

void
dump_cgraph (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "callgraph:\n\n");
  for (node = cgraph_nodes; node; node = node->next)
    dump_cgraph_node (f, node);
}

/* Dump given varpool node.  */
void
dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node)
{
  fprintf (f, "%s:", cgraph_varpool_node_name (node));
  fprintf (f, " availability:%s",
           availability_names [cgraph_variable_initializer_availability (node)]);
  if (DECL_INITIAL (node->decl))
    fprintf (f, " initialized");
  if (node->needed)
    fprintf (f, " needed");
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->finalized)
    fprintf (f, " finalized");
  if (node->output)
    fprintf (f, " output");
  if (node->externally_visible)
    fprintf (f, " externally_visible");
  fprintf (f, "\n");
}

/* Dump the varpool.  */

void
dump_varpool (FILE *f)
{
  struct cgraph_varpool_node *node;

  fprintf (f, "variable pool:\n\n");
  for (node = cgraph_varpool_nodes; node; node = node->next_needed)
    dump_cgraph_varpool_node (f, node);
}

/* Returns a hash code for P.  */

static hashval_t
hash_varpool_node (const void *p)
{
  const struct cgraph_varpool_node *n = p;
  return (hashval_t) DECL_UID (n->decl);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
eq_varpool_node (const void *p1, const void *p2)
{
  const struct cgraph_varpool_node *n1 = p1, *n2 = p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}

/* Return cgraph_varpool node assigned to DECL.  Create new one when needed.  */
struct cgraph_varpool_node *
cgraph_varpool_node (tree decl)
{
  struct cgraph_varpool_node key, *node, **slot;

  gcc_assert (DECL_P (decl) && TREE_CODE (decl) != FUNCTION_DECL);

  if (!cgraph_varpool_hash)
    cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
                                           eq_varpool_node, NULL);
  key.decl = decl;
  slot = (struct cgraph_varpool_node **)
    htab_find_slot (cgraph_varpool_hash, &key, INSERT);
  if (*slot)
    return *slot;
  node = ggc_alloc_cleared (sizeof (*node));
  node->decl = decl;
  node->next = cgraph_varpool_nodes;
  cgraph_varpool_nodes = node;
  *slot = node;
  return node;
}

/* Return the varpool node that has ASMNAME for its DECL_ASSEMBLER_NAME.
   Return NULL if there's no such node.  */

struct cgraph_varpool_node *
cgraph_varpool_node_for_asm (tree asmname)
{
  struct cgraph_varpool_node *node;

  for (node = cgraph_varpool_nodes; node ; node = node->next)
    if (decl_assembler_name_equal (node->decl, asmname))
      return node;

  return NULL;
}

/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */
void
change_decl_assembler_name (tree decl, tree name)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      SET_DECL_ASSEMBLER_NAME (decl, name);
      return;
    }
  if (name == DECL_ASSEMBLER_NAME (decl))
    return;

  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
      && DECL_RTL_SET_P (decl))
    warning (0, "%D renamed after being referenced in assembly", decl);

  SET_DECL_ASSEMBLER_NAME (decl, name);
}

/* Helper function for finalization code - add node into lists so it will
   be analyzed and compiled.  */
void
cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node)
{
  if (cgraph_varpool_last_needed_node)
    cgraph_varpool_last_needed_node->next_needed = node;
  cgraph_varpool_last_needed_node = node;
  node->next_needed = NULL;
  if (!cgraph_varpool_nodes_queue)
    cgraph_varpool_nodes_queue = node;
  if (!cgraph_varpool_first_unanalyzed_node)
    cgraph_varpool_first_unanalyzed_node = node;
  notice_global_symbol (node->decl);
}

/* Reset the queue of needed nodes.  */
void
cgraph_varpool_reset_queue (void)
{
  cgraph_varpool_last_needed_node = NULL;
  cgraph_varpool_nodes_queue = NULL;
  cgraph_varpool_first_unanalyzed_node = NULL;
}

/* Notify finalize_compilation_unit that given node is reachable
   or needed.  */
void
cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
{
  if (!node->needed && node->finalized)
    cgraph_varpool_enqueue_needed_node (node);
  node->needed = 1;
}

/* Determine if variable DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  */

bool
decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->externally_visible
      || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the definition available, then it's still needed.  */
  if (node->needed)
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if it's marked for inlining.  Otherwise we want to emit it now.  */

  /* We want to emit COMDAT variables only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  return true;
}

void
cgraph_varpool_finalize_decl (tree decl)
{
  struct cgraph_varpool_node *node = cgraph_varpool_node (decl);

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */
  if (node->finalized)
    {
      if (cgraph_global_info_ready || !flag_unit_at_a_time)
        cgraph_varpool_assemble_pending_decls ();
      return;
    }
  if (node->needed)
    cgraph_varpool_enqueue_needed_node (node);
  node->finalized = true;

  if (decide_is_variable_needed (node, decl))
    cgraph_varpool_mark_needed_node (node);
  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    cgraph_varpool_mark_needed_node (node);
  if (cgraph_global_info_ready || !flag_unit_at_a_time)
    cgraph_varpool_assemble_pending_decls ();
}

/* Return true when the DECL can possibly be inlined.  */
bool
cgraph_function_possibly_inlined_p (tree decl)
{
  if (!cgraph_global_info_ready)
    return (DECL_INLINE (decl) && !flag_really_no_inline);
  return DECL_POSSIBLY_INLINED (decl);
}

/* Create a clone of edge E in the callgraph, attached to node N and
   represented by the call statement CALL_STMT.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
                   tree call_stmt, gcov_type count_scale, int loop_nest,
                   bool update_original)
{
  struct cgraph_edge *new;

  new = cgraph_create_edge (n, e->callee, call_stmt,
                            e->count * count_scale / REG_BR_PROB_BASE,
                            e->loop_nest + loop_nest);

  new->inline_failed = e->inline_failed;
  if (update_original)
    {
      e->count -= new->count;
      if (e->count < 0)
        e->count = 0;
    }
  return new;
}

/* Create a node representing a clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the clone.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest,
                   bool update_original)
{
  struct cgraph_node *new = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;

  new->decl = n->decl;
  new->origin = n->origin;
  if (new->origin)
    {
      new->next_nested = new->origin->nested;
      new->origin->nested = new;
    }
  new->analyzed = n->analyzed;
  new->local = n->local;
  new->global = n->global;
  new->rtl = n->rtl;
  new->master_clone = n->master_clone;
  new->count = count;
  if (n->count)
    count_scale = new->count * REG_BR_PROB_BASE / n->count;
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
        n->count = 0;
    }

  for (e = n->callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest,
                       update_original);

  new->next_clone = n->next_clone;
  new->prev_clone = n;
  n->next_clone = new;
  if (new->next_clone)
    new->next_clone->prev_clone = new;

  return new;
}

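/* A small worked example of the scaling above (illustrative numbers only):
   if the original node N has count 1000 and the clone is created with
   COUNT 400, count_scale corresponds to the ratio 400/1000, so a callee
   edge with count 500 gets roughly 200 in the clone (modulo the fixed-point
   rounding through REG_BR_PROB_BASE); with UPDATE_ORIGINAL set, the original
   edge keeps the remaining 300 and N's own count drops to 600.  */
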
/* Return true if N is a master clone (see cgraph_master_clone).  */

bool
cgraph_is_master_clone (struct cgraph_node *n)
{
  return (n == cgraph_master_clone (n));
}

/* Return the master clone of N if its body is available, and NULL
   otherwise.  */

struct cgraph_node *
cgraph_master_clone (struct cgraph_node *n)
{
  enum availability avail = cgraph_function_body_availability (n);

  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
    return NULL;

  if (!n->master_clone)
    n->master_clone = cgraph_node (n->decl);

  return n->master_clone;
}

/* NODE is no longer a nested function; update the cgraph accordingly.  */
void
cgraph_unnest_node (struct cgraph_node *node)
{
  struct cgraph_node **node2 = &node->origin->nested;
  gcc_assert (node->origin);

  while (*node2 != node)
    node2 = &(*node2)->next_nested;
  *node2 = node->next_nested;
  node->origin = NULL;
}

/* Return function availability.  See cgraph.h for description of individual
   return values.  */
enum availability
cgraph_function_body_availability (struct cgraph_node *node)
{
  enum availability avail;
  gcc_assert (cgraph_function_flags_ready);
  if (!node->analyzed)
    avail = AVAIL_NOT_AVAILABLE;
  else if (node->local.local)
    avail = AVAIL_LOCAL;
  else if (node->local.externally_visible)
    avail = AVAIL_AVAILABLE;

  /* If the function can be overwritten, return OVERWRITABLE.  Take
     care at least of two notable extensions - the COMDAT functions
     used to share template instantiations in C++ (this is symmetric
     to code cp_cannot_inline_tree_fn and probably shall be shared and
     the inlinability hooks completely eliminated).

     ??? Does the C++ one definition rule allow us to always return
     AVAIL_AVAILABLE here?  That would be a good reason to preserve this
     hook.  Similarly, deal with extern inline functions - this is again
     necessary to get C++ shared functions having keyed templates
     right, and in the C extension documentation we probably should
     document the requirement of both versions of the function (extern
     inline and offline) having the same side effect characteristics, as
     good optimization is what this optimization is about.  */

  else if (!(*targetm.binds_local_p) (node->decl)
           && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
    avail = AVAIL_OVERWRITABLE;
  else avail = AVAIL_AVAILABLE;

  return avail;
}

/* Return variable availability.  See cgraph.h for description of individual
   return values.  */
enum availability
cgraph_variable_initializer_availability (struct cgraph_varpool_node *node)
{
  gcc_assert (cgraph_function_flags_ready);
  if (!node->finalized)
    return AVAIL_NOT_AVAILABLE;
  if (!TREE_PUBLIC (node->decl))
    return AVAIL_AVAILABLE;
  /* If the variable can be overwritten, return OVERWRITABLE.  Takes
     care of at least two notable extensions - the COMDAT variables
     used to share template instantiations in C++.  */
  if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
    return AVAIL_OVERWRITABLE;
  return AVAIL_AVAILABLE;
}

#include "gt-cgraph.h"