/* Callgraph construction.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-flow.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cgraph.h"
#include "intl.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ipa-utils.h"
#include "except.h"
#include "ipa-inline.h"

/* Context of record_reference.  */
struct record_reference_ctx
{
  bool only_vars;
  struct varpool_node *varpool_node;
};

/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   When DATA is non-null, record references to callgraph.  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  t = canonicalize_constructor_val (t);
  if (!t)
    t = *tp;
  else if (t != *tp)
    *tp = t;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
         functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          struct cgraph_node *node = cgraph_get_create_node (decl);
          if (!ctx->only_vars)
            cgraph_mark_address_taken_node (node);
          ipa_record_reference (NULL, ctx->varpool_node, node, NULL,
                                IPA_REF_ADDR, NULL);
        }

      if (TREE_CODE (decl) == VAR_DECL)
        {
          struct varpool_node *vnode = varpool_node (decl);
          if (lang_hooks.callgraph.analyze_expr)
            lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
          varpool_mark_needed_node (vnode);
          ipa_record_reference (NULL, ctx->varpool_node,
                                NULL, vnode,
                                IPA_REF_ADDR, NULL);
        }
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declarations, as we
         won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
        {
          *walk_subtrees = 0;
          break;
        }

      if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
        return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees);
      break;
    }

  return NULL_TREE;
}

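/* For example, given a static initializer such as

     static void (*fp) (void) = &foo;
     static int *ip = &some_var;

   (foo and some_var being arbitrary file-scope entities used purely for
   illustration), the walk over DECL_INITIAL reaches the ADDR_EXPRs above,
   marks foo address-taken in the callgraph unless only_vars was requested,
   marks some_var as needed in the varpool, and records IPA_REF_ADDR
   references from the varpool node of the variable being initialized.  */
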
/* Record references to typeinfos in the type list LIST.  */

static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);

      if (TYPE_P (type))
        type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            {
              struct varpool_node *vnode = varpool_node (type);
              varpool_mark_needed_node (vnode);
              ipa_record_reference (node, NULL,
                                    NULL, vnode,
                                    IPA_REF_ADDR, NULL);
            }
        }
    }
}

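/* As an illustration, a C++ handler such as "catch (Foo &)" puts the
   runtime typeinfo object for Foo on the handler's type list; the loop
   above then finds the ADDR_EXPR of that typeinfo variable and records an
   IPA_REF_ADDR to its varpool node so it is kept for the EH tables.
   (Foo is just an illustrative class name.)  */
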
/* Record all references we will introduce by producing EH tables
   for NODE.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->decl))
    {
      struct cgraph_node *per_node;

      per_node = cgraph_get_create_node (DECL_FUNCTION_PERSONALITY (node->decl));
      ipa_record_reference (node, NULL, per_node, NULL, IPA_REF_ADDR, NULL);
      cgraph_mark_address_taken_node (per_node);
    }

  i = fun->eh->region_tree;
  if (!i)
    return;

  while (1)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              record_type_list (node, c->type_list);
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          record_type_list (node, i->u.allowed.type_list);
          break;
        }
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Reset inlining information of all incoming call edges of NODE.  */

void
reset_inline_failed (struct cgraph_node *node)
{
  struct cgraph_edge *e;

  for (e = node->callers; e; e = e->next_caller)
    {
      e->callee->global.inlined_to = NULL;
      initialize_inline_failed (e);
    }
}

/* Computes the frequency of the call statement so that it can be stored in
   cgraph_edge.  BB is the basic block of the call statement.  */
int
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
{
  int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
                     (DECL_STRUCT_FUNCTION (decl))->frequency;
  int freq = bb->frequency;

  if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
    return CGRAPH_FREQ_BASE;

  if (!entry_freq)
    entry_freq = 1, freq++;

  freq = freq * CGRAPH_FREQ_BASE / entry_freq;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  return freq;
}

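/* As a rough example, if the entry block frequency of DECL is 10000 and
   BB's frequency is 2500, the result is 2500 * CGRAPH_FREQ_BASE / 10000,
   i.e. one quarter of CGRAPH_FREQ_BASE; anything larger than
   CGRAPH_FREQ_MAX is clamped to CGRAPH_FREQ_MAX.  (The numbers are
   illustrative only.)  */
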
/* Mark address taken in STMT.  */

static bool
mark_address (gimple stmt, tree addr, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_get_create_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            node, NULL,
                            IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
           && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      struct varpool_node *vnode = varpool_node (addr);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_ADDR, stmt);
    }

  return false;
}

/* Mark load of T.  */

static bool
mark_load (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
         directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_get_create_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            node, NULL,
                            IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
           && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_LOAD, stmt);
    }
  return false;
}

/* Mark store of T.  */

static bool
mark_store (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
        lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
                            NULL, vnode,
                            IPA_REF_STORE, stmt);
    }
  return false;
}

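/* The three callbacks above are handed to walk_stmt_load_store_addr_ops
   below.  For an illustrative statement such as "global_a = global_b;"
   the walker invokes mark_store on global_a (recorded as IPA_REF_STORE)
   and mark_load on global_b (IPA_REF_LOAD); an operand such as "&bar" is
   passed to mark_address, which marks bar address-taken and records an
   IPA_REF_ADDR.  The names global_a, global_b and bar are placeholders.  */
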
/* Create cgraph edges for function calls.
   Also look for functions and variables having addresses taken.  */

static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl),
                                    stmt, bb->count, freq);
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
          if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
              && gimple_omp_parallel_child_fn (stmt))
            {
              tree fn = gimple_omp_parallel_child_fn (stmt);
              ipa_record_reference (node, NULL, cgraph_get_create_node (fn),
                                    NULL, IPA_REF_ADDR, stmt);
            }
          if (gimple_code (stmt) == GIMPLE_OMP_TASK)
            {
              tree fn = gimple_omp_task_child_fn (stmt);
              if (fn)
                ipa_record_reference (node, NULL, cgraph_get_create_node (fn),
                                      NULL, IPA_REF_ADDR, stmt);
              fn = gimple_omp_task_copy_fn (stmt);
              if (fn)
                ipa_record_reference (node, NULL, cgraph_get_create_node (fn),
                                      NULL, IPA_REF_ADDR, stmt);
            }
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}

struct gimple_opt_pass pass_build_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*build_cgraph_edges",                /* name */
  NULL,                                 /* gate */
  build_cgraph_edges,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Record references to functions and other variables present in the
   initial value of DECL, a variable.
   When ONLY_VARS is true, we mark only variables as needed, not functions.  */

void
record_references_in_initializer (tree decl, bool only_vars)
{
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  struct varpool_node *node = varpool_node (decl);
  struct record_reference_ctx ctx = {false, NULL};

  ctx.varpool_node = node;
  ctx.only_vars = only_vars;
  walk_tree (&DECL_INITIAL (decl), record_reference,
             &ctx, visited_nodes);
  pointer_set_destroy (visited_nodes);
}

/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              int freq = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
              decl = gimple_call_fndecl (stmt);
              if (decl)
                cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
                                    bb->count, freq);
              else
                cgraph_create_indirect_edge (node, stmt,
                                             gimple_call_flags (stmt),
                                             bb->count, freq);
            }
          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  return 0;
}

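/* rebuild_cgraph_edges is scheduled through pass_rebuild_cgraph_edges
   defined below; cgraph_rebuild_references, which follows, redoes only
   the reference lists and leaves the call edges alone.  */
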
/* Rebuild references of current function node.  This needs to be run after
   passes that don't update the cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          walk_stmt_load_store_addr_ops (stmt, node, mark_load,
                                         mark_store, mark_address);
        }
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
        walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
                                       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
}

struct gimple_opt_pass pass_rebuild_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*rebuild_cgraph_edges",              /* name */
  NULL,                                 /* gate */
  rebuild_cgraph_edges,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CGRAPH,                            /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};

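/* Remove all callee edges of the cgraph node for the current function.  */
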
static unsigned int
remove_cgraph_callee_edges (void)
{
  cgraph_node_remove_callees (cgraph_get_node (current_function_decl));
  return 0;
}

struct gimple_opt_pass pass_remove_cgraph_callee_edges =
{
 {
  GIMPLE_PASS,
  "*remove_cgraph_callee_edges",        /* name */
  NULL,                                 /* gate */
  remove_cgraph_callee_edges,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};