/* Source: gcc/cgraphbuild.c, from thirdparty/gcc.git (git.ipfire.org mirror).
   Branch note: autogenerated fixes of "->symbol." to "->".  */
1 /* Callgraph construction.
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "langhooks.h"
28 #include "pointer-set.h"
29 #include "intl.h"
30 #include "tree-pass.h"
31 #include "ipa-utils.h"
32 #include "except.h"
33 #include "ipa-inline.h"
34
/* Context of record_reference: describes who is doing the referring
   during a walk of a variable initializer, and what should be marked.  */
struct record_reference_ctx
{
  /* When true, only variables are marked as needed; functions found in
     the initializer are recorded but not marked address-taken.  */
  bool only_vars;
  /* The varpool node whose initializer is being walked; all references
     discovered are recorded as originating from it.  */
  struct varpool_node *varpool_node;
};
41
/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is pointer to tree to be examined.
   DATA is a record_reference_ctx giving the referring varpool node and
   whether only variables should be marked as needed.  */

static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  /* Canonicalize the constructor value first; if canonicalization
     produced a different tree, write it back in place.  */
  t = canonicalize_constructor_val (t, NULL);
  if (!t)
    t = *tp;
  else if (t != *tp)
    *tp = t;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      /* Bare decls are expected to have been folded away by the
	 canonicalization above.  */
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
	 functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  struct cgraph_node *node = cgraph_get_create_real_symbol_node (decl);
	  /* In only_vars mode functions are recorded but not marked
	     address-taken.  */
	  if (!ctx->only_vars)
	    cgraph_mark_address_taken_node (node);
	  ipa_record_reference (ctx->varpool_node,
				node,
				IPA_REF_ADDR, NULL);
	}

      if (TREE_CODE (decl) == VAR_DECL)
	{
	  struct varpool_node *vnode = varpool_node_for_decl (decl);
	  ipa_record_reference (ctx->varpool_node,
				vnode,
				IPA_REF_ADDR, NULL);
	}
      /* The address expression is fully handled; don't walk into it.  */
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declaration as we
	 won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
	{
	  *walk_subtrees = 0;
	  break;
	}
      break;
    }

  return NULL_TREE;
}
105
106 /* Record references to typeinfos in the type list LIST. */
107
108 static void
109 record_type_list (struct cgraph_node *node, tree list)
110 {
111 for (; list; list = TREE_CHAIN (list))
112 {
113 tree type = TREE_VALUE (list);
114
115 if (TYPE_P (type))
116 type = lookup_type_for_runtime (type);
117 STRIP_NOPS (type);
118 if (TREE_CODE (type) == ADDR_EXPR)
119 {
120 type = TREE_OPERAND (type, 0);
121 if (TREE_CODE (type) == VAR_DECL)
122 {
123 struct varpool_node *vnode = varpool_node_for_decl (type);
124 ipa_record_reference (node,
125 vnode,
126 IPA_REF_ADDR, NULL);
127 }
128 }
129 }
130 }
131
/* Record all references we will introduce by producing EH tables
   for NODE, whose body is FUN.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  /* The personality routine is referenced by the unwind machinery;
     record it and mark its address as taken.  */
  if (DECL_FUNCTION_PERSONALITY (node->decl))
    {
      struct cgraph_node *per_node;

      per_node = cgraph_get_create_real_symbol_node (DECL_FUNCTION_PERSONALITY (node->decl));
      ipa_record_reference (node, per_node, IPA_REF_ADDR, NULL);
      cgraph_mark_address_taken_node (per_node);
    }

  i = fun->eh->region_tree;
  if (!i)
    return;

  /* Iterative pre-order walk of the EH region tree, collecting
     typeinfo references from try and allowed-exceptions regions.  */
  while (1)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These region kinds carry no type lists.  */
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      record_type_list (node, c->type_list);
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  record_type_list (node, i->u.allowed.type_list);
	  break;
	}
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
193
194 /* Computes the frequency of the call statement so that it can be stored in
195 cgraph_edge. BB is the basic block of the call statement. */
196 int
197 compute_call_stmt_bb_frequency (tree decl, basic_block bb)
198 {
199 int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
200 (DECL_STRUCT_FUNCTION (decl))->frequency;
201 int freq = bb->frequency;
202
203 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
204 return CGRAPH_FREQ_BASE;
205
206 if (!entry_freq)
207 entry_freq = 1, freq++;
208
209 freq = freq * CGRAPH_FREQ_BASE / entry_freq;
210 if (freq > CGRAPH_FREQ_MAX)
211 freq = CGRAPH_FREQ_MAX;
212
213 return freq;
214 }
215
216 /* Mark address taken in STMT. */
217
218 static bool
219 mark_address (gimple stmt, tree addr, void *data)
220 {
221 addr = get_base_address (addr);
222 if (TREE_CODE (addr) == FUNCTION_DECL)
223 {
224 struct cgraph_node *node = cgraph_get_create_real_symbol_node (addr);
225 cgraph_mark_address_taken_node (node);
226 ipa_record_reference ((symtab_node)data,
227 node,
228 IPA_REF_ADDR, stmt);
229 }
230 else if (addr && TREE_CODE (addr) == VAR_DECL
231 && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
232 {
233 struct varpool_node *vnode = varpool_node_for_decl (addr);
234
235 ipa_record_reference ((symtab_node)data,
236 vnode,
237 IPA_REF_ADDR, stmt);
238 }
239
240 return false;
241 }
242
/* Mark load of T in STMT.  DATA is the referring symtab node.
   Callback for walk_stmt_load_store_addr_ops; always returns false so
   the walk continues.  */

static bool
mark_load (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
	 directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_get_create_real_symbol_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node)data,
			    node,
			    IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
	   && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      /* Only static/external variables are recorded; loads from locals
	 are not interesting for IPA.  */
      struct varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node)data,
			    vnode,
			    IPA_REF_LOAD, stmt);
    }
  return false;
}
270
271 /* Mark store of T. */
272
273 static bool
274 mark_store (gimple stmt, tree t, void *data)
275 {
276 t = get_base_address (t);
277 if (t && TREE_CODE (t) == VAR_DECL
278 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
279 {
280 struct varpool_node *vnode = varpool_node_for_decl (t);
281
282 ipa_record_reference ((symtab_node)data,
283 vnode,
284 IPA_REF_STORE, stmt);
285 }
286 return false;
287 }
288
/* Record all references from NODE that are taken in statement STMT.
   Loads, stores and taken addresses are dispatched to the mark_load,
   mark_store and mark_address callbacks above.  */
void
ipa_record_stmt_references (struct cgraph_node *node, gimple stmt)
{
  walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
				 mark_address);
}
296
297 /* Create cgraph edges for function calls.
298 Also look for functions and variables having addresses taken. */
299
300 static unsigned int
301 build_cgraph_edges (void)
302 {
303 basic_block bb;
304 struct cgraph_node *node = cgraph_get_node (current_function_decl);
305 struct pointer_set_t *visited_nodes = pointer_set_create ();
306 gimple_stmt_iterator gsi;
307 tree decl;
308 unsigned ix;
309
310 /* Create the callgraph edges and record the nodes referenced by the function.
311 body. */
312 FOR_EACH_BB (bb)
313 {
314 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
315 {
316 gimple stmt = gsi_stmt (gsi);
317 tree decl;
318
319 if (is_gimple_debug (stmt))
320 continue;
321
322 if (is_gimple_call (stmt))
323 {
324 int freq = compute_call_stmt_bb_frequency (current_function_decl,
325 bb);
326 decl = gimple_call_fndecl (stmt);
327 if (decl)
328 cgraph_create_edge (node, cgraph_get_create_node (decl),
329 stmt, bb->count, freq);
330 else
331 cgraph_create_indirect_edge (node, stmt,
332 gimple_call_flags (stmt),
333 bb->count, freq);
334 }
335 ipa_record_stmt_references (node, stmt);
336 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
337 && gimple_omp_parallel_child_fn (stmt))
338 {
339 tree fn = gimple_omp_parallel_child_fn (stmt);
340 ipa_record_reference (node,
341 cgraph_get_create_real_symbol_node (fn),
342 IPA_REF_ADDR, stmt);
343 }
344 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
345 {
346 tree fn = gimple_omp_task_child_fn (stmt);
347 if (fn)
348 ipa_record_reference (node,
349 cgraph_get_create_real_symbol_node (fn),
350 IPA_REF_ADDR, stmt);
351 fn = gimple_omp_task_copy_fn (stmt);
352 if (fn)
353 ipa_record_reference (node,
354 cgraph_get_create_real_symbol_node (fn),
355 IPA_REF_ADDR, stmt);
356 }
357 }
358 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
359 ipa_record_stmt_references (node, gsi_stmt (gsi));
360 }
361
362 /* Look for initializers of constant variables and private statics. */
363 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
364 if (TREE_CODE (decl) == VAR_DECL
365 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
366 && !DECL_HAS_VALUE_EXPR_P (decl))
367 varpool_finalize_decl (decl);
368 record_eh_tables (node, cfun);
369
370 pointer_set_destroy (visited_nodes);
371 return 0;
372 }
373
namespace {

/* Pass metadata for the *build_cgraph_edges gimple pass: unnamed in
   dumps (leading '*'), no gate, executes unconditionally, requires a
   CFG.  */
const pass_data pass_data_build_cgraph_edges =
{
  GIMPLE_PASS, /* type */
  "*build_cgraph_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping build_cgraph_edges.  */
class pass_build_cgraph_edges : public gimple_opt_pass
{
public:
  pass_build_cgraph_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cgraph_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return build_cgraph_edges (); }

}; // class pass_build_cgraph_edges

} // anon namespace
404
/* Factory for the build-cgraph-edges pass; called by the pass
   manager.  Caller owns the returned pass object.  */
gimple_opt_pass *
make_pass_build_cgraph_edges (gcc::context *ctxt)
{
  return new pass_build_cgraph_edges (ctxt);
}
410
411 /* Record references to functions and other variables present in the
412 initial value of DECL, a variable.
413 When ONLY_VARS is true, we mark needed only variables, not functions. */
414
415 void
416 record_references_in_initializer (tree decl, bool only_vars)
417 {
418 struct pointer_set_t *visited_nodes = pointer_set_create ();
419 struct varpool_node *node = varpool_node_for_decl (decl);
420 struct record_reference_ctx ctx = {false, NULL};
421
422 ctx.varpool_node = node;
423 ctx.only_vars = only_vars;
424 walk_tree (&DECL_INITIAL (decl), record_reference,
425 &ctx, visited_nodes);
426 pointer_set_destroy (visited_nodes);
427 }
428
/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  Drops all existing callees and
   references of the node, then rescans the body.  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  /* Start from a clean slate: remove stale edges and references.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
				    bb->count, freq);
	      else
		/* Indirect call: target decl unknown.  */
		cgraph_create_indirect_edge (node, stmt,
					     gimple_call_flags (stmt),
					     bb->count, freq);
	    }
	  ipa_record_stmt_references (node, stmt);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	ipa_record_stmt_references (node, gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
  /* Inline clones should never be rebuilt here.  */
  gcc_assert (!node->global.inlined_to);

  return 0;
}
474
/* Rebuild references of the current function node (the leading comment
   previously said "edges" — this rebuilds only IPA references, not call
   edges).  This needs to be run after passes that don't update the
   cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;
  struct ipa_ref *ref;
  int i;

  /* Keep speculative references for further cgraph edge expansion.  */
  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref);)
    if (!ref->speculative)
      /* Removal compacts the list, so I is not advanced here.  */
      ipa_remove_reference (ref);
    else
      i++;

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	ipa_record_stmt_references (node, gsi_stmt (gsi));
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	ipa_record_stmt_references (node, gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
}
505
namespace {

/* Pass metadata for the *rebuild_cgraph_edges gimple pass: unnamed in
   dumps, no gate, executes unconditionally, requires a CFG, timed
   under TV_CGRAPH.  */
const pass_data pass_data_rebuild_cgraph_edges =
{
  GIMPLE_PASS, /* type */
  "*rebuild_cgraph_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CGRAPH, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping rebuild_cgraph_edges; clonable so it can appear
   multiple times in the pass pipeline.  */
class pass_rebuild_cgraph_edges : public gimple_opt_pass
{
public:
  pass_rebuild_cgraph_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_rebuild_cgraph_edges, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_rebuild_cgraph_edges (m_ctxt); }
  unsigned int execute () { return rebuild_cgraph_edges (); }

}; // class pass_rebuild_cgraph_edges

} // anon namespace
537
/* Factory for the rebuild-cgraph-edges pass; called by the pass
   manager.  Caller owns the returned pass object.  */
gimple_opt_pass *
make_pass_rebuild_cgraph_edges (gcc::context *ctxt)
{
  return new pass_rebuild_cgraph_edges (ctxt);
}
543
544
/* Remove all callee edges and all IPA references of the current
   function's cgraph node.  Entry point of the
   *remove_cgraph_callee_edges pass; returns 0 (no TODO flags).  */
static unsigned int
remove_cgraph_callee_edges (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
  return 0;
}
553
namespace {

/* Pass metadata for the *remove_cgraph_callee_edges gimple pass:
   unnamed in dumps, no gate, executes unconditionally, no property
   requirements.  */
const pass_data pass_data_remove_cgraph_callee_edges =
{
  GIMPLE_PASS, /* type */
  "*remove_cgraph_callee_edges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping remove_cgraph_callee_edges; clonable so it can
   appear multiple times in the pass pipeline.  */
class pass_remove_cgraph_callee_edges : public gimple_opt_pass
{
public:
  pass_remove_cgraph_callee_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_remove_cgraph_callee_edges, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () {
    return new pass_remove_cgraph_callee_edges (m_ctxt);
  }
  unsigned int execute () { return remove_cgraph_callee_edges (); }

}; // class pass_remove_cgraph_callee_edges

} // anon namespace
587
/* Factory for the remove-cgraph-callee-edges pass; called by the pass
   manager.  Caller owns the returned pass object.  */
gimple_opt_pass *
make_pass_remove_cgraph_callee_edges (gcc::context *ctxt)
{
  return new pass_remove_cgraph_callee_edges (ctxt);
}