]>
Commit | Line | Data |
---|---|---|
f7d118a9 | 1 | /* Utilities for ipa analysis. |
3aea1f79 | 2 | Copyright (C) 2005-2014 Free Software Foundation, Inc. |
f7d118a9 | 3 | Contributed by Kenneth Zadeck <zadeck@naturalbridge.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 9 | Software Foundation; either version 3, or (at your option) any later |
f7d118a9 | 10 | version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
f7d118a9 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
bc61cadb | 26 | #include "basic-block.h" |
27 | #include "tree-ssa-alias.h" | |
28 | #include "internal-fn.h" | |
29 | #include "gimple-expr.h" | |
30 | #include "is-a.h" | |
b23fb4cb | 31 | #include "gimple.h" |
f7d118a9 | 32 | #include "tree-inline.h" |
b9ed1410 | 33 | #include "dumpfile.h" |
f7d118a9 | 34 | #include "langhooks.h" |
5863771b | 35 | #include "splay-tree.h" |
f7d118a9 | 36 | #include "ipa-utils.h" |
37 | #include "ipa-reference.h" | |
f7d118a9 | 38 | #include "flags.h" |
f7d118a9 | 39 | #include "diagnostic.h" |
40 | #include "langhooks.h" | |
7e4b741c | 41 | #include "lto-streamer.h" |
42 | #include "ipa-inline.h" | |
f7d118a9 | 43 | |
44 | /* Debugging function for postorder and inorder code. NOTE is a string | |
45 | that is printed before the nodes are printed. ORDER is an array of | |
46 | cgraph_nodes that has COUNT useful nodes in it. */ | |
47 | ||
48e1416a | 48 | void |
7771d558 | 49 | ipa_print_order (FILE* out, |
50 | const char * note, | |
51 | struct cgraph_node** order, | |
52 | int count) | |
f7d118a9 | 53 | { |
54 | int i; | |
55 | fprintf (out, "\n\n ordered call graph: %s\n", note); | |
48e1416a | 56 | |
f7d118a9 | 57 | for (i = count - 1; i >= 0; i--) |
415d1b9a | 58 | order[i]->dump (out); |
f7d118a9 | 59 | fprintf (out, "\n"); |
9af5ce0c | 60 | fflush (out); |
f7d118a9 | 61 | } |
62 | ||
415d1b9a | 63 | |
/* Shared environment for the recursive searchc () walk.  It exists
   because searchc is recursive and C has no nested functions, so all
   traversal state is threaded through this structure instead.  */
struct searchc_env {
  struct cgraph_node **stack;	/* Tarjan's node stack.  */
  int stack_size;		/* Number of entries currently on stack.  */
  struct cgraph_node **result;	/* Output array for the postorder.  */
  int order_pos;		/* Next free slot in result.  */
  splay_tree nodes_marked_new;	/* Nodes not yet visited, keyed by uid.  */
  bool reduce;			/* If true, collapse each SCC to one node.  */
  bool allow_overwritable;	/* Include AVAIL_INTERPOSABLE nodes.  */
  int count;			/* Next DFS number to hand out.  */
};
74 | ||
/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho Hopcraft and Ullman's The Design and
   Analysis of Computer Programs (1975) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  IGNORE_EDGE, if non-NULL, is a predicate that makes
   the traversal skip some call edges.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
	 bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* mark node as old */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  /* Assign the DFS number; low_link starts equal to it and may only
     shrink as back/cross edges are discovered below.  */
  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      /* Only follow edges to nodes that ipa_reduced_postorder gave an
	 aux info to, i.e. nodes whose availability passed the filter.  */
      if (w->aux
	  && (avail > AVAIL_INTERPOSABLE
	      || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
	{
	  w_info = (struct ipa_dfs_info *) w->aux;
	  if (w_info->new_node)
	    {
	      /* Tree edge: recurse, then fold the child's low_link in.  */
	      searchc (env, w, ignore_edge);
	      v_info->low_link =
		(v_info->low_link < w_info->low_link) ?
		v_info->low_link : w_info->low_link;
	    }
	  else
	    /* Back or cross edge to a node still on the stack: its DFS
	       number bounds our low_link.  */
	    if ((w_info->dfn_number < v_info->dfn_number)
		&& (w_info->on_stack))
	      v_info->low_link =
		(w_info->dfn_number < v_info->low_link) ?
		w_info->dfn_number : v_info->low_link;
	}
    }


  /* V is the root of an SCC exactly when its low_link never dropped
     below its own DFS number; pop the whole component off the stack.  */
  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
	x = env->stack[--(env->stack_size)];
	x_info = (struct ipa_dfs_info *) x->aux;
	x_info->on_stack = false;
	x_info->scc_no = v_info->dfn_number;

	if (env->reduce)
	  {
	    /* Chain the SCC members together via next_cycle and emit
	       only the root V into the result below.  */
	    x_info->next_cycle = last;
	    last = x;
	  }
	else
	  env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
	env->result[env->order_pos++] = v;
    }
}
157 | ||
/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL is a hook that may make some edges insignificant
   for the topological sort.

   Returns the number of nodes stored into ORDER.  Every visited node gets
   an ipa_dfs_info hung off its aux pointer; the caller must eventually
   call ipa_free_postorder_info to release it.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
		       bool reduce, bool allow_overwritable,
		       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  /* Seed the splay tree with every defined function that passes the
     availability filter; the rest get a NULL aux so searchc skips them.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
	  || (allow_overwritable
	      && (avail == AVAIL_INTERPOSABLE)))
	{
	  /* Reuse the info if it is already there.  */
	  struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
	  if (!info)
	    info = XCNEW (struct ipa_dfs_info);
	  info->new_node = true;
	  info->on_stack = false;
	  info->next_cycle = NULL;
	  node->aux = info;

	  splay_tree_insert (env.nodes_marked_new,
			     (splay_tree_key)node->uid,
			     (splay_tree_value)node);
	}
      else
	node->aux = NULL;
    }
  /* Repeatedly start a DFS from the smallest-uid unvisited node; searchc
     removes nodes from the tree as it marks them old, so this terminates
     when every node has been visited.  */
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
221 | ||
7771d558 | 222 | /* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call |
223 | graph nodes. */ | |
224 | ||
225 | void | |
226 | ipa_free_postorder_info (void) | |
227 | { | |
228 | struct cgraph_node *node; | |
7c455d87 | 229 | FOR_EACH_DEFINED_FUNCTION (node) |
7771d558 | 230 | { |
231 | /* Get rid of the aux information. */ | |
02774f2d | 232 | if (node->aux) |
7771d558 | 233 | { |
02774f2d | 234 | free (node->aux); |
235 | node->aux = NULL; | |
7771d558 | 236 | } |
237 | } | |
238 | } | |
239 | ||
9631926a | 240 | /* Get the set of nodes for the cycle in the reduced call graph starting |
241 | from NODE. */ | |
242 | ||
415d1b9a | 243 | vec<cgraph_node *> |
9631926a | 244 | ipa_get_nodes_in_cycle (struct cgraph_node *node) |
245 | { | |
415d1b9a | 246 | vec<cgraph_node *> v = vNULL; |
9631926a | 247 | struct ipa_dfs_info *node_dfs_info; |
248 | while (node) | |
249 | { | |
f1f41a6c | 250 | v.safe_push (node); |
02774f2d | 251 | node_dfs_info = (struct ipa_dfs_info *) node->aux; |
9631926a | 252 | node = node_dfs_info->next_cycle; |
253 | } | |
254 | return v; | |
255 | } | |
256 | ||
a0255a70 | 257 | /* Return true iff the CS is an edge within a strongly connected component as |
258 | computed by ipa_reduced_postorder. */ | |
259 | ||
260 | bool | |
261 | ipa_edge_within_scc (struct cgraph_edge *cs) | |
262 | { | |
02774f2d | 263 | struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux; |
a0255a70 | 264 | struct ipa_dfs_info *callee_dfs; |
415d1b9a | 265 | struct cgraph_node *callee = cs->callee->function_symbol (); |
a0255a70 | 266 | |
02774f2d | 267 | callee_dfs = (struct ipa_dfs_info *) callee->aux; |
a0255a70 | 268 | return (caller_dfs |
269 | && callee_dfs | |
270 | && caller_dfs->scc_no == callee_dfs->scc_no); | |
271 | } | |
272 | ||
/* One frame of the explicit DFS stack used by ipa_reverse_postorder.  */
struct postorder_stack
{
  struct cgraph_node *node;	/* Node being expanded.  */
  struct cgraph_edge *edge;	/* Next caller edge to examine.  */
  int ref;			/* Next referring-alias index to examine.  */
};
279 | ||
/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.

   Uses an explicit stack (struct postorder_stack) rather than recursion.
   The aux pointer is used as a visited mark ((void *)1) and is cleared on
   every function before returning.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  /* Pass 0 starts DFS only from "root" nodes (not address-taken, not
     inlined, not aliases/thunks, not only-called-directly); pass 1 then
     sweeps up anything still unvisited, e.g. nodes only inside cycles.  */
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !node->alias && !node->thunk.thunk_p
		  && !node->only_called_directly_p ())))
	{
	  stack_size = 0;
	  stack[stack_size].node = node;
	  stack[stack_size].edge = node->callers;
	  stack[stack_size].ref = 0;
	  node->aux = (void *)(size_t)1;
	  while (stack_size >= 0)
	    {
	      while (true)
		{
		  node2 = NULL;
		  /* First look for an unprocessed caller edge.  */
		  while (stack[stack_size].edge && !node2)
		    {
		      edge = stack[stack_size].edge;
		      node2 = edge->caller;
		      stack[stack_size].edge = edge->next_caller;
		      /* Break possible cycles involving always-inline
			 functions by ignoring edges from always-inline
			 functions to non-always-inline functions.  */
		      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
			  && !DECL_DISREGARD_INLINE_LIMITS
			      (edge->callee->function_symbol ()->decl))
			node2 = NULL;
		    }
		  /* Then look for an alias referring to this node; aliases
		     must be ordered after the node they alias.  */
		  for (; stack[stack_size].node->iterate_referring (
						       stack[stack_size].ref,
						       ref) && !node2;
		       stack[stack_size].ref++)
		    {
		      if (ref->use == IPA_REF_ALIAS)
			node2 = dyn_cast <cgraph_node *> (ref->referring);
		    }
		  if (!node2)
		    break;
		  /* Push unvisited predecessors; visited ones are skipped
		     but their edge/ref cursors above have advanced.  */
		  if (!node2->aux)
		    {
		      stack[++stack_size].node = node2;
		      stack[stack_size].edge = node2->callers;
		      stack[stack_size].ref = 0;
		      node2->aux = (void *)(size_t)1;
		    }
		}
	      /* All predecessors done: emit the node and pop the frame.  */
	      order[order_pos++] = stack[stack_size--].node;
	    }
	}
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
361 | ||
362 | ||
f7d118a9 | 363 | |
364 | /* Given a memory reference T, will return the variable at the bottom | |
9d75589a | 365 | of the access. Unlike get_base_address, this will recurse through |
f7d118a9 | 366 | INDIRECT_REFS. */ |
367 | ||
368 | tree | |
369 | get_base_var (tree t) | |
370 | { | |
48e1416a | 371 | while (!SSA_VAR_P (t) |
f7d118a9 | 372 | && (!CONSTANT_CLASS_P (t)) |
373 | && TREE_CODE (t) != LABEL_DECL | |
374 | && TREE_CODE (t) != FUNCTION_DECL | |
9ed5b1f5 | 375 | && TREE_CODE (t) != CONST_DECL |
376 | && TREE_CODE (t) != CONSTRUCTOR) | |
f7d118a9 | 377 | { |
378 | t = TREE_OPERAND (t, 0); | |
379 | } | |
380 | return t; | |
48e1416a | 381 | } |
f7d118a9 | 382 | |
19ad01f7 | 383 | |
/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Destroy SRC's body on the way.

   The merge only transfers CFG counts when the two bodies' control flow
   graphs match block-for-block and edge-for-edge; otherwise only the
   node-level counters (frequency, tp_first_run, profile_id, count) are
   combined and the bodies are left alone.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
		    struct cgraph_node *src)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  /* With no profile counts on DST there is nothing further to merge.  */
  if (!dst->count)
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s/%i to %s/%i\n",
	       xstrdup (src->name ()), src->order,
	       xstrdup (dst->name ()), dst->order);
    }
  dst->count += src->count;

  /* This is ugly.  We need to get both function bodies into memory.
     If declaration is merged, we need to duplicate it to be able
     to load body that is being replaced.  This makes symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      void **slot;
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl, we want to remove its file decl data.
	 and link these with the new decl.  */
      temp.fn_decl = src->decl;
      slot = htab_find_slot (src->lto_file_data->function_decl_states,
			     &temp, NO_INSERT);
      state = (lto_in_decl_state *)*slot;
      htab_clear_slot (src->lto_file_data->function_decl_states, slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with new declaration, so LTO streamer
	 can look it up.  */
      state->fn_decl = src->decl;
      slot = htab_find_slot (src->lto_file_data->function_decl_states,
			     state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_body ();
  dst->get_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  /* Check that the two CFGs are structurally identical before touching
     any basic-block or edge counts.  */
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Giving up; number of basic block mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
	   != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
	{
	  unsigned int i;

	  dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
	  if (dstbb == NULL)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file,
			 "No matching block for bb %i.\n",
			 srcbb->index);
	      match = false;
	      break;
	    }
	  if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file,
			 "Edge count mistmatch for bb %i.\n",
			 srcbb->index);
	      match = false;
	      break;
	    }
	  for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
	    {
	      edge srce = EDGE_SUCC (srcbb, i);
	      edge dste = EDGE_SUCC (dstbb, i);
	      if (srce->dest->index != dste->dest->index)
		{
		  if (symtab->dump_file)
		    fprintf (symtab->dump_file,
			     "Succ edge mistmatch for bb %i.\n",
			     srce->dest->index);
		  match = false;
		  break;
		}
	    }
	}
    }
  if (match)
    {
      struct cgraph_edge *e;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
	{
	  unsigned int i;

	  dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
	  dstbb->count += srcbb->count;
	  for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
	    {
	      edge srce = EDGE_SUCC (srcbb, i);
	      edge dste = EDGE_SUCC (dstbb, i);
	      dste->count += srce->count;
	    }
	}
      /* Recompute frequencies of DST from the merged counts, then refresh
	 the count/frequency of every outgoing call edge.  */
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
	{
	  gcc_assert (!e->speculative);
	  e->count = gimple_bb (e->call_stmt)->count;
	  e->frequency = compute_call_stmt_bb_frequency
			    (dst->decl,
			     gimple_bb (e->call_stmt));
	}
      for (e = dst->indirect_calls; e; e = e->next_callee)
	{
	  gcc_assert (!e->speculative);
	  e->count = gimple_bb (e->call_stmt)->count;
	  e->frequency = compute_call_stmt_bb_frequency
			    (dst->decl,
			     gimple_bb (e->call_stmt));
	}
      src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  /* Restore SRC's original decl; the copy made above (if any) stays
     attached to the LTO decl state only.  */
  src->decl = oldsrcdecl;
}
560 | ||
e9de52cc | 561 | /* Return true if call to DEST is known to be self-recusive call withing FUNC. */ |
562 | ||
563 | bool | |
564 | recursive_call_p (tree func, tree dest) | |
565 | { | |
415d1b9a | 566 | struct cgraph_node *dest_node = cgraph_node::get_create (dest); |
567 | struct cgraph_node *cnode = cgraph_node::get_create (func); | |
e9de52cc | 568 | |
415d1b9a | 569 | return dest_node->semantically_equivalent_p (cnode); |
e9de52cc | 570 | } |