/* Utilities for ipa analysis.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "dumpfile.h"
#include "splay-tree.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}


struct searchc_env {
  struct cgraph_node **stack;
  struct cgraph_node **result;
  int stack_size;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->get_uid ());

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail > AVAIL_INTERPOSABLE
              || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || (allow_overwritable
              && (avail == AVAIL_INTERPOSABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->get_uid (),
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
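
/* Example usage (an illustrative sketch, not code that belongs to this
   file): a pass typically allocates ORDER from the current cgraph size,
   runs the reduced postorder, consumes the result, and then releases the
   per-node ipa_dfs_info with ipa_free_postorder_info:

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            symtab->cgraph_count);
     int n = ipa_reduced_postorder (order, true, false, NULL);
     ipa_print_order (stderr, "reduced", order, n);
     ipa_free_postorder_info ();
     free (order);
*/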

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
   graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
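
/* Illustrative sketch (assumed usage, not part of this file): after a
   reduced postorder, each entry of ORDER stands for a whole SCC, and the
   member nodes can be recovered and visited like this:

     vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (order[i]);
     for (unsigned j = 0; j < cycle_nodes.length (); j++)
       cycle_nodes[j]->dump (stderr);
     cycle_nodes.release ();

   The returned vector is heap allocated, so the caller releases it.  */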

/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                                (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                   stack[stack_size].ref,
                                                   ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
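
/* Illustrative sketch (assumed usage, not part of this file): the caller
   allocates ORDER from the current cgraph size and then processes the nodes
   in the returned order; no ipa_free_postorder_info call is needed because
   this walk only uses the aux pointer as a temporary visited mark:

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     ipa_print_order (stderr, "reverse postorder", order, n);
     free (order);
*/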


/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
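
/* For instance (an illustrative example): for a reference such as a.b[2].c
   the loop strips the COMPONENT_REFs and the ARRAY_REF and returns the
   VAR_DECL a; for a dereference *p_1 it strips the memory reference and
   returns the SSA name p_1.  */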

/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src,
                    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;

  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  /* Merging zero profile to dst is no-op.  */
  if (src->count.ipa () == profile_count::zero ())
    return;

  /* FIXME: when we merge in an unknown profile, we ought to set the counts
     as unsafe.  */
  if (!src->count.initialized_p ()
      || !(src->count.ipa () == src->count))
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s to %s\n",
               src->dump_name (), dst->dump_name ());
    }
  profile_count orig_count = dst->count;

  if (dst->count.initialized_p () && dst->count.ipa () == dst->count)
    dst->count += src->count.ipa ();
  else
    dst->count = src->count.ipa ();

  /* This is ugly.  We need to get both function bodies into memory.
     If the declaration is merged, we need to duplicate it to be able
     to load the body that is being replaced.  This makes the symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl data
         and link these with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          if (dstbb == NULL)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "No matching block for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Edge count mismatch for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              if (srce->dest->index != dste->dest->index)
                {
                  if (symtab->dump_file)
                    fprintf (symtab->dump_file,
                             "Succ edge mismatch for bb %i.\n",
                             srce->dest->index);
                  match = false;
                  break;
                }
            }
        }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);

          /* Either sum the profiles if both are IPA and not global0, or
             pick the more informative one (that is, nonzero IPA if the other
             is uninitialized, guessed or global0).  */
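          /* A worked example of the summing branch below (illustrative
             numbers only): if DSTBB has count 300 with a successor edge
             probability of 25% and the matching SRCBB has count 100 with
             75%, the merged probability is 25% * 300/400 + 75% * 100/400
             = 37.5%, and the merged block count becomes 400.  */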
          if (!dstbb->count.ipa ().initialized_p ()
              || (dstbb->count.ipa () == profile_count::zero ()
                  && (srcbb->count.ipa ().initialized_p ()
                      && !(srcbb->count.ipa () == profile_count::zero ()))))
            {
              dstbb->count = srcbb->count;
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  if (srce->probability.initialized_p ())
                    dste->probability = srce->probability;
                }
            }
          else if (srcbb->count.ipa ().initialized_p ()
                   && !(srcbb->count.ipa () == profile_count::zero ()))
            {
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  dste->probability =
                    dste->probability * dstbb->count.probability_in (dstbb->count + srcbb->count)
                    + srce->probability * srcbb->count.probability_in (dstbb->count + srcbb->count);
                }
              dstbb->count += srcbb->count;
            }
        }
      push_cfun (dstcfun);
      update_max_bb_count ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          profile_count count = gimple_bb (e->call_stmt)->count;
          /* When the call is speculative, we need to re-distribute
             probabilities the same way as they were.  This is not really
             correct because in the other copy the speculation may differ;
             but probably it is not really worth the effort.  */
          if (e->speculative)
            {
              cgraph_edge *direct, *indirect;
              cgraph_edge *direct2 = NULL, *indirect2 = NULL;
              ipa_ref *ref;

              e->speculative_call_info (direct, indirect, ref);
              gcc_assert (e == indirect);
              if (e2 && e2->speculative)
                e2->speculative_call_info (direct2, indirect2, ref);
              if (indirect->count > profile_count::zero ()
                  || direct->count > profile_count::zero ())
                {
                  /* We should mismatch earlier if there is no matching
                     indirect edge.  */
                  if (!e2)
                    {
                      if (dump_file)
                        fprintf (dump_file,
                                 "Mismatch in merging indirect edges\n");
                    }
                  else if (!e2->speculative)
                    indirect->count += e2->count;
                  else if (e2->speculative)
                    {
                      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
                          != DECL_ASSEMBLER_NAME (direct->callee->decl))
                        {
                          if (direct2->count >= direct->count)
                            {
                              direct->redirect_callee (direct2->callee);
                              indirect->count += indirect2->count
                                                 + direct->count;
                              direct->count = direct2->count;
                            }
                          else
                            indirect->count += indirect2->count + direct2->count;
                        }
                      else
                        {
                          direct->count += direct2->count;
                          indirect->count += indirect2->count;
                        }
                    }
                }
              else
                /* At the moment we should have only profile feedback based
                   speculations when merging.  */
                gcc_unreachable ();
            }
          else if (e2 && e2->speculative)
            {
              cgraph_edge *direct, *indirect;
              ipa_ref *ref;

              e2->speculative_call_info (direct, indirect, ref);
              e->count = count;
              e->make_speculative (direct->callee, direct->count);
            }
          else
            e->count = count;
        }
      if (!preserve_body)
        src->release_body ();
      /* Update summary.  */
      compute_fn_summary (dst, 0);
    }
  /* We can't update the CFG profile, but we can scale the IPA profile.  CFG
     will be scaled according to dst->count after IPA passes.  */
  else
    {
      profile_count to = dst->count;
      profile_count::adjust_for_ipa_scaling (&to, &orig_count);
      struct cgraph_edge *e;

      for (e = dst->callees; e; e = e->next_callee)
        e->count = e->count.apply_scale (to, orig_count);
      for (e = dst->indirect_calls; e; e = e->next_callee)
        e->count = e->count.apply_scale (to, orig_count);
    }
  src->decl = oldsrcdecl;
}

/* Return true if the call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);
  ipa_ref *alias;
  enum availability avail;

  gcc_assert (!cnode->alias);
  if (cnode != dest_node->ultimate_alias_target (&avail))
    return false;
  if (avail >= AVAIL_AVAILABLE)
    return true;
  if (!dest_node->semantically_equivalent_p (cnode))
    return false;
  /* If there is only one way to call the function, or we know all of them
     are semantically equivalent, we can still consider the call recursive.  */
  FOR_EACH_ALIAS (cnode, alias)
    if (!dest_node->semantically_equivalent_p (alias->referring))
      return false;
  return true;
}
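
/* Illustrative sketch (assumed use, not part of this file): for a simple
   direct recursion such as

     int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

   recursive_call_p (current_function_decl, gimple_call_fndecl (stmt))
   returns true for the inner call; it also covers the case where the callee
   decl is an alias whose ultimate target is FUNC itself.  */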