/* Callgraph handling code.
   Copyright (C) 2003 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "hashtab.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"

static void cgraph_expand_functions PARAMS ((void));
static void cgraph_mark_functions_to_output PARAMS ((void));
static void cgraph_expand_function PARAMS ((struct cgraph_node *));
static tree record_call_1 PARAMS ((tree *, int *, void *));
static void cgraph_mark_local_functions PARAMS ((void));
static void cgraph_mark_functions_to_inline_once PARAMS ((void));
static void cgraph_optimize_function PARAMS ((struct cgraph_node *));

/* Analyze the function once it is parsed.  Set up the available local
   information and create cgraph edges for function calls via BODY.  */

void
cgraph_finalize_function (decl, body)
     tree decl;
     tree body ATTRIBUTE_UNUSED;
{
  struct cgraph_node *node = cgraph_node (decl);

  node->decl = decl;

  node->local.can_inline_once = tree_inlinable_function_p (decl, 1);
  if (flag_inline_trees)
    node->local.inline_many = tree_inlinable_function_p (decl, 0);
  else
    node->local.inline_many = 0;

  (*debug_hooks->deferred_inline_function) (decl);
}

static struct cgraph_node *queue = NULL;

/* Notify cgraph_finalize_compilation_unit that the given node is reachable
   or needed.  */
void
cgraph_mark_needed_node (node, needed)
     struct cgraph_node *node;
     int needed;
{
  if (needed)
    {
      if (DECL_SAVED_TREE (node->decl))
        announce_function (node->decl);
      node->needed = 1;
    }
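  /* Newly reachable nodes that still have a body are put on the work queue;
     cgraph_finalize_compilation_unit drains this queue and lowers them.  */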
  if (!node->reachable)
    {
      node->reachable = 1;
      if (DECL_SAVED_TREE (node->decl))
        {
          node->aux = queue;
          queue = node;
        }
    }
}

/* Walk tree and record all calls.  Called via walk_tree.  */
static tree
record_call_1 (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  /* Record dereferences to the functions.  This makes the functions
     reachable unconditionally.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        cgraph_mark_needed_node (cgraph_node (decl), 1);
    }
  else if (TREE_CODE (*tp) == CALL_EXPR)
    {
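      /* For a CALL_EXPR, operand 0 is the called function (usually an
         ADDR_EXPR wrapping a FUNCTION_DECL) and operand 1 is the argument
         list; the arguments may themselves contain calls, so they are
         walked explicitly below before this subtree is pruned.  */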
      tree decl = TREE_OPERAND (*tp, 0);
      if (TREE_CODE (decl) == ADDR_EXPR)
        decl = TREE_OPERAND (decl, 0);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          if (DECL_BUILT_IN (decl))
            return NULL;
          cgraph_record_call (data, decl);
          walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data, NULL);
          *walk_subtrees = 0;
        }
    }
  return NULL;
}

/* Create cgraph edges for function calls via BODY.  */

void
cgraph_create_edges (decl, body)
     tree decl;
     tree body;
{
  walk_tree (&body, record_call_1, decl, NULL);
}

/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit ()
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  /* Collect entry points to the unit.  */

  if (!quiet_flag)
    fprintf (stderr, "\n\nUnit entry points:");

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;

      if (!DECL_SAVED_TREE (decl))
        continue;
      if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
          || (DECL_ASSEMBLER_NAME_SET_P (decl)
              && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))))
        {
          cgraph_mark_needed_node (node, 1);
        }
    }

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points along the way (by template instantiation
     and virtual method table generation, for instance).  */
  while (queue)
    {
      tree decl = queue->decl;

      node = queue;
      queue = queue->aux;
      if (node->lowered || !node->reachable || !DECL_SAVED_TREE (decl))
        abort ();

      /* At the moment the frontend automatically emits all nested
         functions.  */
      if (node->nested)
        {
          struct cgraph_node *node2;

          for (node2 = node->nested; node2; node2 = node2->next_nested)
            if (!node2->reachable)
              cgraph_mark_needed_node (node2, 0);
        }

      if (lang_hooks.callgraph.lower_function)
        (*lang_hooks.callgraph.lower_function) (decl);
      /* First kill the forward declaration so reverse inlining works
         properly.  */
      cgraph_create_edges (decl, DECL_SAVED_TREE (decl));

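      /* Mark every callee recorded above as reachable so that it is queued
         and lowered as well.  */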
      for (edge = node->callees; edge; edge = edge->next_callee)
        {
          if (!edge->callee->reachable)
            cgraph_mark_needed_node (edge->callee, 0);
        }
      node->lowered = true;
    }
  if (!quiet_flag)
    fprintf (stderr, "\n\nReclaiming functions:");

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;

      if (!node->reachable && DECL_SAVED_TREE (decl))
        {
          cgraph_remove_node (node);
          announce_function (decl);
        }
    }
  ggc_collect ();
}

/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output ()
{
  struct cgraph_node *node;

  /* Figure out functions we want to assemble.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;

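      /* A function is output when its body is available, it has not been
         written yet, it is not nested, and it is not external, and it is
         either needed, referenced by its assembler name, or reachable
         without being marked inline_many or inline_once.  */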
      if (DECL_SAVED_TREE (decl)
          && (node->needed
              || (!node->local.inline_many && !node->global.inline_once
                  && node->reachable)
              || TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
          && !TREE_ASM_WRITTEN (decl) && !node->origin
          && !DECL_EXTERNAL (decl))
        node->output = 1;
    }
}

/* Optimize the function before expansion.  */
static void
cgraph_optimize_function (node)
     struct cgraph_node *node;
{
  tree decl = node->decl;

  if (flag_inline_trees)
    optimize_inline_calls (decl);
  if (node->nested)
    {
      for (node = node->nested; node; node = node->next_nested)
        cgraph_optimize_function (node);
    }
}

/* Expand the function specified by NODE.  */
static void
cgraph_expand_function (node)
     struct cgraph_node *node;
{
  tree decl = node->decl;

  announce_function (decl);

  cgraph_optimize_function (node);

  /* Keep RTL inlining from taking place.  */
  (*lang_hooks.callgraph.expand_function) (decl);

  /* When we decided to inline the function once, we should never need to
     output it separately.  */
  if (node->global.inline_once)
    abort ();
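  /* Release the body if it is no longer going to be needed for inlining
     into remaining callers.  */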
  if (!node->local.inline_many
      || !node->callers)
    DECL_SAVED_TREE (decl) = NULL;
  current_function_decl = NULL;
}


/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so that a function is output once
   all of its callees have already been assembled, to allow data to be
   propagated across the callgraph.  Use a stack to get a smaller distance
   between a function and its callees (later we may use a more sophisticated
   algorithm for function reordering; we will likely want to use subsections
   so that output functions appear in top-down order rather than the
   bottom-up order in which they are assembled).  */

static void
cgraph_expand_functions ()
{
  struct cgraph_node *node, *node2;
  struct cgraph_node **stack =
    xcalloc (sizeof (struct cgraph_node *), cgraph_n_nodes);
  struct cgraph_node **order =
    xcalloc (sizeof (struct cgraph_node *), cgraph_n_nodes);
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge, last;
  int i;

  cgraph_mark_functions_to_output ();

  /* We have to deal with cycles nicely, so use a depth first traversal
     algorithm.  Ignore the fact that some functions will not need to be
     output, and put them into the order as well, so that we get the
     dependencies right through inlined functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  for (node = cgraph_nodes; node; node = node->next)
    if (node->output && !node->aux)
      {
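        /* During the walk, AUX of each visited node points to the next
           caller edge still to be scanned; the address of LAST serves as a
           sentinel marking a node whose callers have all been visited.  */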
        node2 = node;
        if (!node->callers)
          node->aux = &last;
        else
          node->aux = node->callers;
        while (node2)
          {
            while (node2->aux != &last)
              {
                edge = node2->aux;
                if (edge->next_caller)
                  node2->aux = edge->next_caller;
                else
                  node2->aux = &last;
                if (!edge->caller->aux)
                  {
                    if (!edge->caller->callers)
                      edge->caller->aux = &last;
                    else
                      edge->caller->aux = edge->caller->callers;
                    stack[stack_size++] = node2;
                    node2 = edge->caller;
                    break;
                  }
              }
            if (node2->aux == &last)
              {
                order[order_pos++] = node2;
                if (stack_size)
                  node2 = stack[--stack_size];
                else
                  node2 = NULL;
              }
          }
      }
  for (i = order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->output)
        {
          if (!node->reachable)
            abort ();
          node->output = 0;
          cgraph_expand_function (node);
        }
    }
  free (stack);
  free (order);
}

/* Mark all local functions.
   We cannot use node->needed directly, as it is modified during
   execution of cgraph_optimize.  */

static void
cgraph_mark_local_functions ()
{
  struct cgraph_node *node;

  if (!quiet_flag)
    fprintf (stderr, "\n\nMarking local functions:");

  /* Figure out which functions are local.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      node->local.local = (!node->needed
                           && DECL_SAVED_TREE (node->decl)
                           && !TREE_PUBLIC (node->decl));
      if (node->local.local)
        announce_function (node->decl);
    }
}

/* Decide which functions should be inlined because they are invoked only
   once (so inlining will not result in duplication of the code).  */

static void
cgraph_mark_functions_to_inline_once ()
{
  struct cgraph_node *node, *node1;

  if (!quiet_flag)
    fprintf (stderr, "\n\nMarking functions to inline once:");

  /* Now look for functions called only once and mark them to be inlined.
     From this point on, the number of calls to a given function will not
     grow.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->callers && !node->callers->next_caller && !node->needed
          && node->local.can_inline_once)
        {
          bool ok = true;

          /* Verify that we won't duplicate the caller.  */
          for (node1 = node->callers->caller;
               node1->local.inline_many
               && node1->callers
               && ok;
               node1 = node1->callers->caller)
            if (node1->callers->next_caller || node1->needed)
              ok = false;
          if (ok)
            {
              node->global.inline_once = true;
              announce_function (node->decl);
            }
        }
    }
}


/* Perform simple optimizations based on the callgraph.  */

void
cgraph_optimize ()
{
  struct cgraph_node *node;
  bool changed = true;

  cgraph_mark_local_functions ();

  cgraph_mark_functions_to_inline_once ();

  cgraph_global_info_ready = true;
  if (!quiet_flag)
    fprintf (stderr, "\n\nAssembling functions:");

  /* Output everything.
     ??? Our inline heuristic may decide not to inline functions previously
     marked as inlinable, thus adding new function bodies that must be
     output.  Later we should move all inlining decisions to the callgraph
     code to make this impossible.  */
  cgraph_expand_functions ();
  if (!quiet_flag)
    fprintf (stderr, "\n\nAssembling functions that failed to inline:");
  while (changed && !errorcount && !sorrycount)
    {
      changed = false;
      for (node = cgraph_nodes; node; node = node->next)
        {
          tree decl = node->decl;
          if (!node->origin
              && !TREE_ASM_WRITTEN (decl)
              && DECL_SAVED_TREE (decl)
              && !DECL_EXTERNAL (decl))
            {
              struct cgraph_edge *edge;

              for (edge = node->callers; edge; edge = edge->next_caller)
                if (TREE_ASM_WRITTEN (edge->caller->decl))
                  {
                    changed = true;
                    cgraph_expand_function (node);
                    break;
                  }
            }
        }
    }
}