]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-utils.c
2017-06-04 Dominique d'Humieres <dominiq@lps.ens.fr>
[thirdparty/gcc.git] / gcc / ipa-utils.c
CommitLineData
/* Utilities for ipa analysis.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
ea900239
DB
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2
AM
24#include "backend.h"
25#include "tree.h"
26#include "gimple.h"
957060b5
AM
27#include "predict.h"
28#include "alloc-pool.h"
29#include "cgraph.h"
30#include "lto-streamer.h"
7ee2468b 31#include "dumpfile.h"
ea264ca5 32#include "splay-tree.h"
ea900239 33#include "ipa-utils.h"
dd912cb8 34#include "symbol-summary.h"
8bc5448f 35#include "tree-vrp.h"
c582198b 36#include "ipa-prop.h"
27d020cf 37#include "ipa-fnsummary.h"
ea900239
DB
38
39/* Debugging function for postorder and inorder code. NOTE is a string
40 that is printed before the nodes are printed. ORDER is an array of
41 cgraph_nodes that has COUNT useful nodes in it. */
42
b8698a0f 43void
af8bca3c
MJ
44ipa_print_order (FILE* out,
45 const char * note,
46 struct cgraph_node** order,
47 int count)
ea900239
DB
48{
49 int i;
50 fprintf (out, "\n\n ordered call graph: %s\n", note);
b8698a0f 51
ea900239 52 for (i = count - 1; i >= 0; i--)
d52f5295 53 order[i]->dump (out);
ea900239 54 fprintf (out, "\n");
c3284718 55 fflush (out);
ea900239
DB
56}
57
d52f5295 58
ea900239
DB
/* Shared state for the Tarjan SCC walk in searchc; passed explicitly
   because the recursion cannot use nested functions in C/C++.  */
struct searchc_env {
  struct cgraph_node **stack;	/* DFS stack of nodes whose SCC is open.  */
  struct cgraph_node **result;	/* Output array filled in postorder.  */
  int stack_size;		/* Number of nodes currently on STACK.  */
  int order_pos;		/* Next free slot in RESULT.  */
  splay_tree nodes_marked_new;	/* Nodes not yet visited, keyed by uid.  */
  bool reduce;			/* If true, emit one entry per SCC and chain
				   SCC members via ipa_dfs_info::next_cycle.  */
  bool allow_overwritable;	/* Include AVAIL_INTERPOSABLE nodes.  */
  int count;			/* Next DFS number to hand out.  */
};
69
/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Programs (1975) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  IGNORE_EDGE, if non-NULL, is a predicate for call
   edges that should be treated as absent.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
	 bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* mark node as old */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  /* Assign the DFS number; low_link starts equal to it and may only
     shrink as back edges are discovered below.  */
  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      /* Look through aliases to the node the call really reaches.  */
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      /* Only follow edges to nodes that were put into the worklist by
	 ipa_reduced_postorder (those have non-NULL aux).  */
      if (w->aux
	  && (avail > AVAIL_INTERPOSABLE
	      || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
	{
	  w_info = (struct ipa_dfs_info *) w->aux;
	  if (w_info->new_node)
	    {
	      /* Tree edge: recurse, then propagate the callee's low link.  */
	      searchc (env, w, ignore_edge);
	      v_info->low_link =
		(v_info->low_link < w_info->low_link) ?
		v_info->low_link : w_info->low_link;
	    }
	  else
	    /* Back/cross edge to a node still on the stack: it belongs
	       to the same (not yet closed) SCC.  */
	    if ((w_info->dfn_number < v_info->dfn_number)
		&& (w_info->on_stack))
	      v_info->low_link =
		(w_info->dfn_number < v_info->low_link) ?
		w_info->dfn_number : v_info->low_link;
	}
    }


  /* V is the root of an SCC exactly when its low link never dropped
     below its own DFS number; pop the whole component off the stack.  */
  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
	x = env->stack[--(env->stack_size)];
	x_info = (struct ipa_dfs_info *) x->aux;
	x_info->on_stack = false;
	x_info->scc_no = v_info->dfn_number;

	if (env->reduce)
	  {
	    /* Reduced mode: chain SCC members through next_cycle and
	       emit only the representative (V) below.  */
	    x_info->next_cycle = last;
	    last = x;
	  }
	else
	  env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
	env->result[env->order_pos++] = v;
    }
}
152
/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL is a hook that may make some edges insignificant
   for the topological sort.

   Returns the number of entries written to ORDER.  On return every
   processed node's aux points to a heap-allocated ipa_dfs_info; call
   ipa_free_postorder_info to release them.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
		       bool reduce, bool allow_overwritable,
		       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  /* Seed the worklist: every eligible defined function gets a fresh
     (or recycled) ipa_dfs_info in aux and an entry keyed by uid.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
	  || (allow_overwritable
	      && (avail == AVAIL_INTERPOSABLE)))
	{
	  /* Reuse the info if it is already there.  */
	  struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
	  if (!info)
	    info = XCNEW (struct ipa_dfs_info);
	  info->new_node = true;
	  info->on_stack = false;
	  info->next_cycle = NULL;
	  node->aux = info;

	  splay_tree_insert (env.nodes_marked_new,
			     (splay_tree_key)node->uid,
			     (splay_tree_value)node);
	}
      else
	node->aux = NULL;
    }
  /* Repeatedly start a DFS from the unvisited node with the smallest
     uid; searchc removes every node it reaches from the tree.  */
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
216
af8bca3c
MJ
217/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
218 graph nodes. */
219
220void
221ipa_free_postorder_info (void)
222{
223 struct cgraph_node *node;
65c70e6b 224 FOR_EACH_DEFINED_FUNCTION (node)
af8bca3c
MJ
225 {
226 /* Get rid of the aux information. */
67348ccc 227 if (node->aux)
af8bca3c 228 {
67348ccc
DM
229 free (node->aux);
230 node->aux = NULL;
af8bca3c
MJ
231 }
232 }
233}
234
df92c640
SB
235/* Get the set of nodes for the cycle in the reduced call graph starting
236 from NODE. */
237
d52f5295 238vec<cgraph_node *>
df92c640
SB
239ipa_get_nodes_in_cycle (struct cgraph_node *node)
240{
d52f5295 241 vec<cgraph_node *> v = vNULL;
df92c640
SB
242 struct ipa_dfs_info *node_dfs_info;
243 while (node)
244 {
9771b263 245 v.safe_push (node);
67348ccc 246 node_dfs_info = (struct ipa_dfs_info *) node->aux;
df92c640
SB
247 node = node_dfs_info->next_cycle;
248 }
249 return v;
250}
251
4cb13597
MJ
252/* Return true iff the CS is an edge within a strongly connected component as
253 computed by ipa_reduced_postorder. */
254
255bool
256ipa_edge_within_scc (struct cgraph_edge *cs)
257{
67348ccc 258 struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
4cb13597 259 struct ipa_dfs_info *callee_dfs;
d52f5295 260 struct cgraph_node *callee = cs->callee->function_symbol ();
4cb13597 261
67348ccc 262 callee_dfs = (struct ipa_dfs_info *) callee->aux;
4cb13597
MJ
263 return (caller_dfs
264 && callee_dfs
265 && caller_dfs->scc_no == callee_dfs->scc_no);
266}
267
8775a18b
JH
/* One frame of the explicit DFS stack used by ipa_reverse_postorder.  */
struct postorder_stack
{
  struct cgraph_node *node;	/* Node being expanded.  */
  struct cgraph_edge *edge;	/* Next caller edge left to visit.  */
  int ref;			/* Next referring-alias index left to visit.  */
};
274
af8bca3c 275/* Fill array order with all nodes with output flag set in the reverse
39e2db00
JH
276 topological order. Return the number of elements in the array.
277 FIXME: While walking, consider aliases, too. */
af8bca3c
MJ
278
279int
280ipa_reverse_postorder (struct cgraph_node **order)
281{
282 struct cgraph_node *node, *node2;
283 int stack_size = 0;
284 int order_pos = 0;
8775a18b 285 struct cgraph_edge *edge;
af8bca3c 286 int pass;
d122681a 287 struct ipa_ref *ref = NULL;
af8bca3c 288
8775a18b 289 struct postorder_stack *stack =
3dafb85c 290 XCNEWVEC (struct postorder_stack, symtab->cgraph_count);
af8bca3c
MJ
291
292 /* We have to deal with cycles nicely, so use a depth first traversal
293 output algorithm. Ignore the fact that some functions won't need
294 to be output and put them into order as well, so we get dependencies
295 right through inline functions. */
65c70e6b 296 FOR_EACH_FUNCTION (node)
67348ccc 297 node->aux = NULL;
af8bca3c 298 for (pass = 0; pass < 2; pass++)
65c70e6b 299 FOR_EACH_FUNCTION (node)
67348ccc 300 if (!node->aux
af8bca3c 301 && (pass
67348ccc 302 || (!node->address_taken
af8bca3c 303 && !node->global.inlined_to
67348ccc 304 && !node->alias && !node->thunk.thunk_p
d52f5295 305 && !node->only_called_directly_p ())))
af8bca3c 306 {
8775a18b
JH
307 stack_size = 0;
308 stack[stack_size].node = node;
309 stack[stack_size].edge = node->callers;
310 stack[stack_size].ref = 0;
67348ccc 311 node->aux = (void *)(size_t)1;
8775a18b 312 while (stack_size >= 0)
af8bca3c 313 {
8775a18b 314 while (true)
af8bca3c 315 {
8775a18b
JH
316 node2 = NULL;
317 while (stack[stack_size].edge && !node2)
af8bca3c 318 {
8775a18b 319 edge = stack[stack_size].edge;
af8bca3c 320 node2 = edge->caller;
8775a18b
JH
321 stack[stack_size].edge = edge->next_caller;
322 /* Break possible cycles involving always-inline
323 functions by ignoring edges from always-inline
324 functions to non-always-inline functions. */
67348ccc 325 if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
8775a18b 326 && !DECL_DISREGARD_INLINE_LIMITS
d52f5295 327 (edge->callee->function_symbol ()->decl))
8775a18b
JH
328 node2 = NULL;
329 }
d122681a 330 for (; stack[stack_size].node->iterate_referring (
8775a18b
JH
331 stack[stack_size].ref,
332 ref) && !node2;
333 stack[stack_size].ref++)
334 {
335 if (ref->use == IPA_REF_ALIAS)
d122681a 336 node2 = dyn_cast <cgraph_node *> (ref->referring);
8775a18b
JH
337 }
338 if (!node2)
339 break;
67348ccc 340 if (!node2->aux)
8775a18b
JH
341 {
342 stack[++stack_size].node = node2;
343 stack[stack_size].edge = node2->callers;
344 stack[stack_size].ref = 0;
67348ccc 345 node2->aux = (void *)(size_t)1;
af8bca3c
MJ
346 }
347 }
8775a18b 348 order[order_pos++] = stack[stack_size--].node;
af8bca3c
MJ
349 }
350 }
351 free (stack);
65c70e6b 352 FOR_EACH_FUNCTION (node)
67348ccc 353 node->aux = NULL;
af8bca3c
MJ
354 return order_pos;
355}
356
357
ea900239
DB
358
359/* Given a memory reference T, will return the variable at the bottom
073a8998 360 of the access. Unlike get_base_address, this will recurse through
ea900239
DB
361 INDIRECT_REFS. */
362
363tree
364get_base_var (tree t)
365{
b8698a0f 366 while (!SSA_VAR_P (t)
ea900239
DB
367 && (!CONSTANT_CLASS_P (t))
368 && TREE_CODE (t) != LABEL_DECL
369 && TREE_CODE (t) != FUNCTION_DECL
3baf459d
DN
370 && TREE_CODE (t) != CONST_DECL
371 && TREE_CODE (t) != CONSTRUCTOR)
ea900239
DB
372 {
373 t = TREE_OPERAND (t, 0);
374 }
375 return t;
b8698a0f 376}
ea900239 377
1cb1a99f 378
4843f032 379/* SRC and DST are going to be merged. Take SRC's profile and merge it into
b730d1c9
JH
380 DST so it is not going to be lost. Possibly destroy SRC's body on the way
381 unless PRESERVE_BODY is set. */
4843f032
JH
382
383void
384ipa_merge_profiles (struct cgraph_node *dst,
b730d1c9
JH
385 struct cgraph_node *src,
386 bool preserve_body)
4843f032 387{
67348ccc 388 tree oldsrcdecl = src->decl;
4843f032
JH
389 struct function *srccfun, *dstcfun;
390 bool match = true;
391
67348ccc
DM
392 if (!src->definition
393 || !dst->definition)
4843f032
JH
394 return;
395 if (src->frequency < dst->frequency)
396 src->frequency = dst->frequency;
9cec31f4
ML
397
398 /* Time profiles are merged. */
399 if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
400 dst->tp_first_run = src->tp_first_run;
401
fd29c024
ML
402 if (src->profile_id && !dst->profile_id)
403 dst->profile_id = src->profile_id;
cb90235d 404
4843f032
JH
405 if (!dst->count)
406 return;
3dafb85c 407 if (symtab->dump_file)
4843f032 408 {
464d0118
ML
409 fprintf (symtab->dump_file, "Merging profiles of %s to %s\n",
410 src->dump_name (), dst->dump_name ());
4843f032
JH
411 }
412 dst->count += src->count;
413
414 /* This is ugly. We need to get both function bodies into memory.
415 If declaration is merged, we need to duplicate it to be able
416 to load body that is being replaced. This makes symbol table
417 temporarily inconsistent. */
67348ccc 418 if (src->decl == dst->decl)
4843f032 419 {
4843f032
JH
420 struct lto_in_decl_state temp;
421 struct lto_in_decl_state *state;
422
423 /* We are going to move the decl, we want to remove its file decl data.
424 and link these with the new decl. */
67348ccc 425 temp.fn_decl = src->decl;
907dadbd
TS
426 lto_in_decl_state **slot
427 = src->lto_file_data->function_decl_states->find_slot (&temp,
428 NO_INSERT);
429 state = *slot;
430 src->lto_file_data->function_decl_states->clear_slot (slot);
4843f032
JH
431 gcc_assert (state);
432
433 /* Duplicate the decl and be sure it does not link into body of DST. */
67348ccc
DM
434 src->decl = copy_node (src->decl);
435 DECL_STRUCT_FUNCTION (src->decl) = NULL;
436 DECL_ARGUMENTS (src->decl) = NULL;
437 DECL_INITIAL (src->decl) = NULL;
438 DECL_RESULT (src->decl) = NULL;
4843f032
JH
439
440 /* Associate the decl state with new declaration, so LTO streamer
441 can look it up. */
67348ccc 442 state->fn_decl = src->decl;
907dadbd
TS
443 slot
444 = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
4843f032
JH
445 gcc_assert (!*slot);
446 *slot = state;
447 }
e3bde69a
JH
448 src->get_untransformed_body ();
449 dst->get_untransformed_body ();
67348ccc
DM
450 srccfun = DECL_STRUCT_FUNCTION (src->decl);
451 dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
0cae8d31
DM
452 if (n_basic_blocks_for_fn (srccfun)
453 != n_basic_blocks_for_fn (dstcfun))
4843f032 454 {
3dafb85c
ML
455 if (symtab->dump_file)
456 fprintf (symtab->dump_file,
4843f032
JH
457 "Giving up; number of basic block mismatch.\n");
458 match = false;
459 }
3986e690
DM
460 else if (last_basic_block_for_fn (srccfun)
461 != last_basic_block_for_fn (dstcfun))
4843f032 462 {
3dafb85c
ML
463 if (symtab->dump_file)
464 fprintf (symtab->dump_file,
4843f032
JH
465 "Giving up; last block mismatch.\n");
466 match = false;
467 }
468 else
469 {
470 basic_block srcbb, dstbb;
471
472 FOR_ALL_BB_FN (srcbb, srccfun)
473 {
474 unsigned int i;
475
bbd79259 476 dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
4843f032
JH
477 if (dstbb == NULL)
478 {
3dafb85c
ML
479 if (symtab->dump_file)
480 fprintf (symtab->dump_file,
4843f032
JH
481 "No matching block for bb %i.\n",
482 srcbb->index);
483 match = false;
484 break;
485 }
486 if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
487 {
3dafb85c
ML
488 if (symtab->dump_file)
489 fprintf (symtab->dump_file,
4843f032
JH
490 "Edge count mistmatch for bb %i.\n",
491 srcbb->index);
492 match = false;
493 break;
494 }
495 for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
496 {
497 edge srce = EDGE_SUCC (srcbb, i);
498 edge dste = EDGE_SUCC (dstbb, i);
499 if (srce->dest->index != dste->dest->index)
500 {
3dafb85c
ML
501 if (symtab->dump_file)
502 fprintf (symtab->dump_file,
4843f032
JH
503 "Succ edge mistmatch for bb %i.\n",
504 srce->dest->index);
505 match = false;
506 break;
507 }
508 }
509 }
510 }
511 if (match)
512 {
befb1f36 513 struct cgraph_edge *e, *e2;
4843f032
JH
514 basic_block srcbb, dstbb;
515
516 /* TODO: merge also statement histograms. */
517 FOR_ALL_BB_FN (srcbb, srccfun)
518 {
519 unsigned int i;
520
bbd79259 521 dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
4843f032
JH
522 dstbb->count += srcbb->count;
523 for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
524 {
525 edge srce = EDGE_SUCC (srcbb, i);
526 edge dste = EDGE_SUCC (dstbb, i);
527 dste->count += srce->count;
528 }
529 }
530 push_cfun (dstcfun);
531 counts_to_freqs ();
532 compute_function_frequency ();
533 pop_cfun ();
534 for (e = dst->callees; e; e = e->next_callee)
535 {
befb1f36
JH
536 if (e->speculative)
537 continue;
4843f032
JH
538 e->count = gimple_bb (e->call_stmt)->count;
539 e->frequency = compute_call_stmt_bb_frequency
67348ccc 540 (dst->decl,
4843f032
JH
541 gimple_bb (e->call_stmt));
542 }
befb1f36
JH
543 for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
544 e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
4843f032 545 {
befb1f36
JH
546 gcov_type count = gimple_bb (e->call_stmt)->count;
547 int freq = compute_call_stmt_bb_frequency
548 (dst->decl,
549 gimple_bb (e->call_stmt));
550 /* When call is speculative, we need to re-distribute probabilities
551 the same way as they was. This is not really correct because
552 in the other copy the speculation may differ; but probably it
553 is not really worth the effort. */
554 if (e->speculative)
555 {
556 cgraph_edge *direct, *indirect;
557 cgraph_edge *direct2 = NULL, *indirect2 = NULL;
558 ipa_ref *ref;
559
560 e->speculative_call_info (direct, indirect, ref);
561 gcc_assert (e == indirect);
562 if (e2 && e2->speculative)
563 e2->speculative_call_info (direct2, indirect2, ref);
564 if (indirect->count || direct->count)
565 {
566 /* We should mismatch earlier if there is no matching
567 indirect edge. */
568 if (!e2)
569 {
570 if (dump_file)
571 fprintf (dump_file,
572 "Mismatch in merging indirect edges\n");
573 }
574 else if (!e2->speculative)
575 indirect->count += e2->count;
576 else if (e2->speculative)
577 {
578 if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
579 != DECL_ASSEMBLER_NAME (direct->callee->decl))
580 {
581 if (direct2->count >= direct->count)
582 {
583 direct->redirect_callee (direct2->callee);
584 indirect->count += indirect2->count
585 + direct->count;
586 direct->count = direct2->count;
587 }
588 else
589 indirect->count += indirect2->count + direct2->count;
590 }
591 else
592 {
593 direct->count += direct2->count;
594 indirect->count += indirect2->count;
595 }
596 }
597 int prob = RDIV (direct->count * REG_BR_PROB_BASE ,
598 direct->count + indirect->count);
599 direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
600 indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
601 REG_BR_PROB_BASE);
602 }
603 else
604 /* At the moment we should have only profile feedback based
605 speculations when merging. */
606 gcc_unreachable ();
607 }
173148bb 608 else if (e2 && e2->speculative)
befb1f36
JH
609 {
610 cgraph_edge *direct, *indirect;
611 ipa_ref *ref;
612
613 e2->speculative_call_info (direct, indirect, ref);
614 e->count = count;
615 e->frequency = freq;
616 int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
617 e->make_speculative (direct->callee, direct->count,
618 RDIV (freq * prob, REG_BR_PROB_BASE));
619 }
620 else
621 {
622 e->count = count;
623 e->frequency = freq;
624 }
4843f032 625 }
b730d1c9
JH
626 if (!preserve_body)
627 src->release_body ();
0bceb671 628 ipa_update_overall_fn_summary (dst);
4843f032
JH
629 }
630 /* TODO: if there is no match, we can scale up. */
67348ccc 631 src->decl = oldsrcdecl;
4843f032
JH
632}
633
fc11f321
JH
/* Return true if a call to DEST is known to be a self-recursive call
   within FUNC, i.e. it can only reach FUNC's own body (possibly through
   semantically equivalent aliases).  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);
  ipa_ref *alias;
  enum availability avail;

  gcc_assert (!cnode->alias);
  /* Not recursive if the call does not resolve back to FUNC.  */
  if (cnode != dest_node->ultimate_alias_target (&avail))
    return false;
  /* The target cannot be interposed away: definitely recursive.  */
  if (avail >= AVAIL_AVAILABLE)
    return true;
  if (!dest_node->semantically_equivalent_p (cnode))
    return false;
  /* If there is only one way to call the function or we know all of them
     are semantically equivalent, we still can consider the call recursive.  */
  FOR_EACH_ALIAS (cnode, alias)
    if (!dest_node->semantically_equivalent_p (alias->referring))
      return false;
  return true;
}