/* Utilities for ipa analysis.
   Copyright (C) 2005-2020 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "dumpfile.h"
#include "splay-tree.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}


struct searchc_env {
  struct cgraph_node **stack;
  struct cgraph_node **result;
  int stack_size;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1975) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark the node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->get_uid ());

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail >= AVAIL_INTERPOSABLE))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Nodes with AVAIL_INTERPOSABLE availability are included in the order.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail >= AVAIL_INTERPOSABLE)
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->get_uid (),
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call
   graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

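/* Usage sketch (illustrative only, not part of this file's interface): an
   IPA pass typically drives ipa_reduced_postorder and
   ipa_free_postorder_info along these lines, where process_scc stands for a
   hypothetical per-node callback:

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            symtab->cgraph_count);
     int n = ipa_reduced_postorder (order, true, NULL);
     for (int i = 0; i < n; i++)
       process_scc (order[i]);
     ipa_free_postorder_info ();
     free (order);

   Nodes come out callees-before-callers; ipa-pure-const.c is one real
   consumer of this pattern.  */
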
/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}

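/* For instance (a sketch, not a definition here), a pass walking a reduced
   order might expand each entry back into its SCC members, with visit being
   a hypothetical helper:

     vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (order[i]);
     unsigned ix;
     cgraph_node *v;
     FOR_EACH_VEC_ELT (cycle_nodes, ix, v)
       visit (v);
     cycle_nodes.release ();

   The returned vector must be released by the caller.  */
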
/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                          (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring (
                                                   stack[stack_size].ref,
                                                   ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}


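/* Sketch of a typical call (illustrative; for example cgraphunit.c uses this
   function when picking a function expansion order).  consider_for_output
   stands for a hypothetical consumer:

     struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,
                                            symtab->cgraph_count);
     int n = ipa_reverse_postorder (order);
     for (int i = 0; i < n; i++)
       consider_for_output (order[i]);
     free (order);  */
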
/* Given a memory reference T, will return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFS.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}

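/* For example (illustrative only):

     get_base_var (<a.b[i].c>)  -> the VAR_DECL a
     get_base_var (<*p_1>)      -> the SSA name p_1

   i.e. the walk strips COMPONENT_REFs, ARRAY_REFs and dereferences until it
   reaches something that is SSA_VAR_P, constant, or one of the DECL codes
   listed above.  */
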
/* Scale the counts of all calls (direct and indirect) in NODE by the ratio
   NODE->count / ORIG_COUNT.  */

void
scale_ipa_profile_for_fn (struct cgraph_node *node, profile_count orig_count)
{
  profile_count to = node->count;
  profile_count::adjust_for_ipa_scaling (&to, &orig_count);
  struct cgraph_edge *e;

  for (e = node->callees; e; e = e->next_callee)
    e->count = e->count.apply_scale (to, orig_count);
  for (e = node->indirect_calls; e; e = e->next_callee)
    e->count = e->count.apply_scale (to, orig_count);
}

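/* As a concrete example (assuming plain, initialized counts): if the edges
   of NODE were consistent with an ORIG_COUNT of 1000 executions and
   NODE->count has since become 500, every outgoing edge count is multiplied
   by 500/1000, i.e. halved, keeping edge counts consistent with the new
   node count.  */
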
/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Possibly destroy SRC's body on the way
   unless PRESERVE_BODY is set.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src,
                    bool preserve_body)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;
  bool copy_counts = false;

  if (!src->definition
      || !dst->definition)
    return;

  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  /* Merging zero profile to dst is a no-op.  */
  if (src->count.ipa () == profile_count::zero ())
    return;

  /* FIXME: when we merge in an unknown profile, we ought to set the counts
     as unsafe.  */
  if (!src->count.initialized_p ()
      || !(src->count.ipa () == src->count))
    return;
  profile_count orig_count = dst->count;

  /* Either sum the profiles if both are IPA and not global0, or
     pick the more informative one (that is nonzero IPA if the other is
     uninitialized, guessed or global0).  */

  if ((dst->count.ipa ().nonzero_p ()
       || src->count.ipa ().nonzero_p ())
      && dst->count.ipa ().initialized_p ()
      && src->count.ipa ().initialized_p ())
    dst->count = dst->count.ipa () + src->count.ipa ();
  else if (dst->count.ipa ().initialized_p ())
    ;
  else if (src->count.ipa ().initialized_p ())
    {
      copy_counts = true;
      dst->count = src->count.ipa ();
    }

  /* If no updating is needed, return early.  */
  if (dst->count == orig_count)
    return;

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s count:",
               src->dump_name ());
      src->count.dump (symtab->dump_file);
      fprintf (symtab->dump_file, " to %s count:",
               dst->dump_name ());
      orig_count.dump (symtab->dump_file);
      fprintf (symtab->dump_file, " resulting count:");
      dst->count.dump (symtab->dump_file);
      fprintf (symtab->dump_file, "\n");
    }

  /* First handle functions with no gimple body.  */
  if (dst->thunk.thunk_p || dst->alias
      || src->thunk.thunk_p || src->alias)
    {
      scale_ipa_profile_for_fn (dst, orig_count);
      return;
    }

  /* This is ugly.  We need to get both function bodies into memory.
     If the declaration is merged, we need to duplicate it to be able
     to load the body that is being replaced.  This makes the symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl data
         and link these with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic block mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;
      struct cgraph_edge *e, *e2;

      for (e = dst->callees, e2 = src->callees; e && e2 && match;
           e2 = e2->next_callee, e = e->next_callee)
        {
          if (gimple_bb (e->call_stmt)->index
              != gimple_bb (e2->call_stmt)->index)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Giving up; call stmt mismatch.\n");
              match = false;
            }
        }
      if (e || e2)
        {
          if (symtab->dump_file)
            fprintf (symtab->dump_file,
                     "Giving up; number of calls differs.\n");
          match = false;
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e && e2 && match;
           e2 = e2->next_callee, e = e->next_callee)
        {
          if (gimple_bb (e->call_stmt)->index
              != gimple_bb (e2->call_stmt)->index)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Giving up; indirect call stmt mismatch.\n");
              match = false;
            }
        }
      if (e || e2)
        {
          if (symtab->dump_file)
            fprintf (symtab->dump_file,
                     "Giving up; number of indirect calls differs.\n");
          match = false;
        }

      if (match)
        FOR_ALL_BB_FN (srcbb, srccfun)
          {
            unsigned int i;

            dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
            if (dstbb == NULL)
              {
                if (symtab->dump_file)
                  fprintf (symtab->dump_file,
                           "No matching block for bb %i.\n",
                           srcbb->index);
                match = false;
                break;
              }
            if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
              {
                if (symtab->dump_file)
                  fprintf (symtab->dump_file,
                           "Edge count mismatch for bb %i.\n",
                           srcbb->index);
                match = false;
                break;
              }
            for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
              {
                edge srce = EDGE_SUCC (srcbb, i);
                edge dste = EDGE_SUCC (dstbb, i);
                if (srce->dest->index != dste->dest->index)
                  {
                    if (symtab->dump_file)
                      fprintf (symtab->dump_file,
                               "Succ edge mismatch for bb %i.\n",
                               srce->dest->index);
                    match = false;
                    break;
                  }
              }
          }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* The function and global profiles may be out of sync.  First scale
         them the same way fixup_cfg would.  */
      profile_count srcnum = src->count;
      profile_count srcden = ENTRY_BLOCK_PTR_FOR_FN (srccfun)->count;
      bool srcscale = srcnum.initialized_p () && !(srcnum == srcden);
      profile_count dstnum = orig_count;
      profile_count dstden = ENTRY_BLOCK_PTR_FOR_FN (dstcfun)->count;
      bool dstscale = !copy_counts
                      && dstnum.initialized_p () && !(dstnum == dstden);

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);

          profile_count srccount = srcbb->count;
          if (srcscale)
            srccount = srccount.apply_scale (srcnum, srcden);
          if (dstscale)
            dstbb->count = dstbb->count.apply_scale (dstnum, dstden);

          if (copy_counts)
            {
              dstbb->count = srccount;
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  if (srce->probability.initialized_p ())
                    dste->probability = srce->probability;
                }
            }
          else
            {
              for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
                {
                  edge srce = EDGE_SUCC (srcbb, i);
                  edge dste = EDGE_SUCC (dstbb, i);
                  dste->probability =
                    dste->probability * dstbb->count.ipa ().probability_in
                                                 (dstbb->count.ipa ()
                                                  + srccount.ipa ())
                    + srce->probability * srcbb->count.ipa ().probability_in
                                                 (dstbb->count.ipa ()
                                                  + srccount.ipa ());
                }
              dstbb->count = dstbb->count.ipa () + srccount.ipa ();
            }
        }
      push_cfun (dstcfun);
      update_max_bb_count ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          if (!e->speculative && !e2->speculative)
            {
              /* FIXME: we need to also merge ipa-profile histograms
                 because with LTO merging happens from lto-symtab before
                 these are converted to indirect edges.  */
              e->count = gimple_bb (e->call_stmt)->count;
              continue;
            }

          /* When copying, just remove all speculations on DST and then copy
             the ones from SRC.  */
          if (copy_counts)
            {
              while (e->speculative)
                cgraph_edge::resolve_speculation (e, NULL);
              e->count = gimple_bb (e->call_stmt)->count;
              if (e2->speculative)
                {
                  for (cgraph_edge *e3 = e2->first_speculative_call_target ();
                       e3;
                       e3 = e3->next_speculative_call_target ())
                    {
                      cgraph_edge *ns;
                      ns = e->make_speculative
                             (dyn_cast <cgraph_node *>
                              (e3->speculative_call_target_ref ()->referred),
                              e3->count, e3->speculative_id);
                      /* The target may differ from the ref (for example it
                         may be redirected to a local alias).  */
                      ns->redirect_callee (e3->callee);
                    }
                }
              continue;
            }

          /* Iterate all speculations in SRC, see if corresponding ones exist
             in DST and if so, sum the counts.  Otherwise create a new
             speculation.  */
          int max_spec = 0;
          for (cgraph_edge *e3 = e->first_speculative_call_target ();
               e3;
               e3 = e3->next_speculative_call_target ())
            if (e3->speculative_id > max_spec)
              max_spec = e3->speculative_id;
          for (cgraph_edge *e3 = e2->first_speculative_call_target ();
               e3;
               e3 = e3->next_speculative_call_target ())
            {
              cgraph_edge *te
                = e->speculative_call_for_target
                         (dyn_cast <cgraph_node *>
                          (e3->speculative_call_target_ref ()->referred));
              if (te)
                te->count = te->count + e3->count;
              else
                {
                  e->count = e->count + e3->count;
                  cgraph_edge *ns;
                  ns = e->make_speculative
                         (dyn_cast <cgraph_node *>
                          (e3->speculative_call_target_ref ()
                           ->referred),
                          e3->count,
                          e3->speculative_id + max_spec + 1);
                  /* The target may differ from the ref (for example it may
                     be redirected to a local alias).  */
                  ns->redirect_callee (e3->callee);
                }
            }
        }
      if (!preserve_body)
        src->release_body ();
      /* Update summary.  */
      compute_fn_summary (dst, 0);
    }
  /* We can't update CFG profile, but we can scale IPA profile.  CFG
     will be scaled according to dst->count after IPA passes.  */
  else
    scale_ipa_profile_for_fn (dst, orig_count);
  src->decl = oldsrcdecl;
}

/* Return true if the call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);
  ipa_ref *alias;
  enum availability avail;

  gcc_assert (!cnode->alias);
  if (cnode != dest_node->ultimate_alias_target (&avail))
    return false;
  if (avail >= AVAIL_AVAILABLE)
    return true;
  if (!dest_node->semantically_equivalent_p (cnode))
    return false;
  /* If there is only one way to call the function or we know all of them
     are semantically equivalent, we can still consider the call recursive.  */
  FOR_EACH_ALIAS (cnode, alias)
    if (!dest_node->semantically_equivalent_p (alias->referring))
      return false;
  return true;
}

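/* A typical (illustrative) use is to test whether a call statement calls
   the function containing it, e.g.

     if (gimple_call_fndecl (stmt)
         && recursive_call_p (current_function_decl,
                              gimple_call_fndecl (stmt)))
       ... treat the call as self-recursion ...

   which also accepts calls that go through semantically equivalent aliases
   of the current function.  */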