/* Utilities for ipa analysis.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "ipa-utils.h"
#include "ipa-reference.h"
#include "flags.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "lto-streamer.h"
#include "ipa-inline.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}
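
/* Environment shared by the recursive strongly-connected-component walk
   below.  STACK/STACK_SIZE hold the Tarjan node stack, RESULT/ORDER_POS
   the output array and the number of entries written so far,
   NODES_MARKED_NEW the not-yet-visited nodes keyed by uid, REDUCE whether
   cycles are collapsed to single entries, ALLOW_OVERWRITABLE whether
   AVAIL_INTERPOSABLE nodes participate, and COUNT the DFS number
   counter.  */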
struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark the node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail > AVAIL_INTERPOSABLE
              || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || (allow_overwritable
              && (avail == AVAIL_INTERPOSABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->uid,
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
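
/* A typical caller first builds the order array, walks it, and then
   releases the DFS info again.  A minimal sketch; the caller-side details
   (such as the process () hook) are illustrative only:

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            cgraph_n_nodes);
     int n = ipa_reduced_postorder (order, true, false, NULL);
     for (int i = 0; i < n; i++)
       process (order[i]);   -- one entry per SCC when REDUCE is true
     ipa_free_postorder_info ();
     free (order);  */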

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of
   call graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
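
/* For instance, after ipa_reduced_postorder with REDUCE set, the members
   of the cycle represented by NODE can be visited as follows (a sketch;
   the loop body is up to the caller):

     vec<cgraph_node *> cycle = ipa_get_nodes_in_cycle (node);
     int i;
     cgraph_node *n;
     FOR_EACH_VEC_ELT (cycle, i, n)
       visit (n);
     cycle.release ();  */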

/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                              (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                   stack[stack_size].ref,
                                                   ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
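
/* Apart from cycles, every function therefore appears in ORDER after all
   of its callers.  For example (illustrative), with main -> foo -> bar and
   no other references, the resulting order is main, foo, bar.  */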


/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
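
/* For example, for a COMPONENT_REF such as a.b.c or an ARRAY_REF such as
   a[i] this returns the VAR_DECL for "a" (illustrative; the exact tree
   shapes depend on the front end).  */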


/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Destroy SRC's body on the way.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  if (!dst->count)
    return;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Merging profiles of %s/%i to %s/%i\n",
               xstrdup (src->name ()), src->order,
               xstrdup (dst->name ()), dst->order);
    }
  dst->count += src->count;

  /* This is ugly.  We need to get both function bodies into memory.
     If the declaration is merged, we need to duplicate it to be able
     to load the body that is being replaced.  This makes the symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      void **slot;
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl data
         and link these with the new decl.  */
      temp.fn_decl = src->decl;
      slot = htab_find_slot (src->lto_file_data->function_decl_states,
                             &temp, NO_INSERT);
      state = (lto_in_decl_state *)*slot;
      htab_clear_slot (src->lto_file_data->function_decl_states, slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot = htab_find_slot (src->lto_file_data->function_decl_states,
                             state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_body ();
  dst->get_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file,
                 "Giving up; number of basic block mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (cgraph_dump_file)
        fprintf (cgraph_dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          if (dstbb == NULL)
            {
              if (cgraph_dump_file)
                fprintf (cgraph_dump_file,
                         "No matching block for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
            {
              if (cgraph_dump_file)
                fprintf (cgraph_dump_file,
                         "Edge count mismatch for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              if (srce->dest->index != dste->dest->index)
                {
                  if (cgraph_dump_file)
                    fprintf (cgraph_dump_file,
                             "Succ edge mismatch for bb %i.\n",
                             srce->dest->index);
                  match = false;
                  break;
                }
            }
        }
    }
  if (match)
    {
      struct cgraph_edge *e;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          dstbb->count += srcbb->count;
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              dste->count += srce->count;
            }
        }
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          gcc_assert (!e->speculative);
          e->count = gimple_bb (e->call_stmt)->count;
          e->frequency = compute_call_stmt_bb_frequency
                            (dst->decl,
                             gimple_bb (e->call_stmt));
        }
      for (e = dst->indirect_calls; e; e = e->next_callee)
        {
          gcc_assert (!e->speculative);
          e->count = gimple_bb (e->call_stmt)->count;
          e->frequency = compute_call_stmt_bb_frequency
                            (dst->decl,
                             gimple_bb (e->call_stmt));
        }
      src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}

/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);

  return dest_node->semantically_equivalent_p (cnode);
}
fc11f321 570}