]>
Commit | Line | Data |
---|---|---|
402209ff JH |
1 | /* Control flow graph analysis code for GNU compiler. |
2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, | |
0ea5865c | 3 | 1999, 2000, 2001, 2003, 2004 Free Software Foundation, Inc. |
402209ff JH |
4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING. If not, write to the Free | |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
21 | ||
22 | /* This file contains various simple utilities to analyze the CFG. */ | |
23 | #include "config.h" | |
24 | #include "system.h" | |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
402209ff JH |
27 | #include "rtl.h" |
28 | #include "hard-reg-set.h" | |
29 | #include "basic-block.h" | |
b6878a45 RH |
30 | #include "insn-config.h" |
31 | #include "recog.h" | |
402209ff | 32 | #include "toplev.h" |
02c5a3bd | 33 | #include "tm_p.h" |
402209ff JH |
34 | |
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack for backtracking during the algorithm.  Holds the blocks
     still to be expanded.  */
  basic_block *stack;

  /* Number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
48 | ||
d329e058 AJ |
49 | static void flow_dfs_compute_reverse_init (depth_first_search_ds); |
50 | static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds, | |
51 | basic_block); | |
52 | static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds); | |
53 | static void flow_dfs_compute_reverse_finish (depth_first_search_ds); | |
54 | static void remove_fake_successors (basic_block); | |
d329e058 | 55 | static bool flow_active_insn_p (rtx); |
402209ff | 56 | \f |
3dfda826 RH |
57 | /* Like active_insn_p, except keep the return value clobber around |
58 | even after reload. */ | |
59 | ||
60 | static bool | |
d329e058 | 61 | flow_active_insn_p (rtx insn) |
3dfda826 RH |
62 | { |
63 | if (active_insn_p (insn)) | |
64 | return true; | |
65 | ||
d329e058 | 66 | /* A clobber of the function return value exists for buggy |
2a5de0f1 | 67 | programs that fail to return a value. Its effect is to |
3dfda826 RH |
68 | keep the return value from being live across the entire |
69 | function. If we allow it to be skipped, we introduce the | |
70 | possibility for register livetime aborts. */ | |
71 | if (GET_CODE (PATTERN (insn)) == CLOBBER | |
72 | && GET_CODE (XEXP (PATTERN (insn), 0)) == REG | |
73 | && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0))) | |
74 | return true; | |
75 | ||
76 | return false; | |
77 | } | |
78 | ||
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (basic_block bb)
{
  rtx insn;

  /* The entry and exit blocks, blocks with no successor, and blocks
     with more than one successor can never be forwarders.  */
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  /* Every insn before the last one must be inactive (no effect).  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* The final insn may be a simple unconditional jump (that's what a
     forwarder uses to reach its destination), but nothing else active.  */
  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}
99 | ||
/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  /* The exit block has no insns; use NULL as the sentinel for "no
     active insn follows", which matches next_active_insn at end.  */
  rtx insn2 = target == EXIT_BLOCK_PTR ? NULL : BB_HEAD (target);

  /* Fallthru requires the blocks to be adjacent in the insn chain.  */
  if (src->next_bb != target)
    return 0;

  if (insn2 && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
117 | \f | |
/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = xcalloc (last_basic_block, sizeof (int));
  post = xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          /* A back edge targets a node still on the current DFS path:
             its destination was entered no later than the source
             (pre number not greater) and has not been exited yet
             (post number still zero).  */
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
209 | ||
/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        {
          e->flags &= ~EDGE_CAN_FALLTHRU;

          /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
          if (e->flags & EDGE_FALLTHRU)
            e->flags |= EDGE_CAN_FALLTHRU;
        }

      /* If the BB ends with an invertible condjump all (2) edges are
         CAN_FALLTHRU edges.  Only blocks with exactly two successors
         are candidates.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
        continue;
      if (!any_condjump_p (BB_END (bb)))
        continue;
      /* The first invert_jump both tests invertibility and performs
         the inversion; the second call inverts the jump again to
         restore its original direction.  */
      if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
        continue;
      invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}
243 | ||
/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A nonzero value indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  basic_block *tos, *worklist, bb;

  /* TOS is the top of a stack growing up from WORKLIST; each block is
     pushed at most once since it is marked reachable when pushed.  */
  tos = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* Clear all the reachability flags.  */

  FOR_EACH_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}
289 | \f | |
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector,  edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This functions initializes the edge list. Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  The caller is responsible
   for freeing the result with free_edge_list.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int block_count;
  basic_block bb;

  block_count = n_basic_blocks + 2;	/* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  elist = xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    for (e = bb->succ; e; e = e->succ_next)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
338 | ||
339 | /* This function free's memory associated with an edge list. */ | |
340 | ||
341 | void | |
d329e058 | 342 | free_edge_list (struct edge_list *elist) |
402209ff JH |
343 | { |
344 | if (elist) | |
345 | { | |
346 | free (elist->index_to_edge); | |
347 | free (elist); | |
348 | } | |
349 | } | |
350 | ||
/* This function provides debug output showing an edge list.  Each
   edge is printed as PRED,SUCC using block indices, with the entry
   and exit blocks spelled out by name.  */

void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}
375 | ||
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  Diagnostics are written to F; nothing is fixed.  */

void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;

  /* Pass 1: every edge in the CFG must have an index in the list, and
     that index must map back to the same pred/succ pair.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  Check every (pred, succ)
     pair of blocks against the CFG.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s)
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->index, s->index);
        if (EDGE_INDEX (elist, p, s)
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
441 | ||
6de9cd9a DN |
442 | /* Given PRED and SUCC blocks, return the edge which connects the blocks. |
443 | If no such edge exists, return NULL. */ | |
444 | ||
445 | edge | |
446 | find_edge (basic_block pred, basic_block succ) | |
447 | { | |
448 | edge e; | |
449 | ||
450 | for (e = pred->succ; e; e = e->succ_next) | |
451 | if (e->dest == succ) | |
452 | return e; | |
453 | ||
454 | return NULL; | |
455 | } | |
456 | ||
402209ff JH |
457 | /* This routine will determine what, if any, edge there is between |
458 | a specified predecessor and successor. */ | |
459 | ||
460 | int | |
d329e058 | 461 | find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ) |
402209ff JH |
462 | { |
463 | int x; | |
4891442b | 464 | |
402209ff | 465 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
4891442b RK |
466 | if (INDEX_EDGE_PRED_BB (edge_list, x) == pred |
467 | && INDEX_EDGE_SUCC_BB (edge_list, x) == succ) | |
468 | return x; | |
469 | ||
402209ff JH |
470 | return (EDGE_INDEX_NO_EDGE); |
471 | } | |
472 | ||
/* Dump the list of basic blocks in the bitmap NODES to FILE, prefixed
   by STR.  A NULL bitmap is allowed and produces no output.  */

void
flow_nodes_print (const char *str, const sbitmap nodes, FILE *file)
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
487 | ||
/* Dump the list of edges in the array EDGE_LIST (NUM_EDGES entries)
   to FILE, prefixed by STR.  A NULL array is allowed and produces no
   output.  */

void
flow_edge_list_print (const char *str, const edge *edge_list, int num_edges, FILE *file)
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}
505 | ||
506 | \f | |
507 | /* This routine will remove any fake successor edges for a basic block. | |
508 | When the edge is removed, it is also removed from whatever predecessor | |
509 | list it is in. */ | |
510 | ||
511 | static void | |
d329e058 | 512 | remove_fake_successors (basic_block bb) |
402209ff JH |
513 | { |
514 | edge e; | |
4891442b | 515 | |
402209ff JH |
516 | for (e = bb->succ; e;) |
517 | { | |
518 | edge tmp = e; | |
4891442b | 519 | |
402209ff JH |
520 | e = e->succ_next; |
521 | if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE) | |
522 | remove_edge (tmp); | |
523 | } | |
524 | } | |
525 | ||
526 | /* This routine will remove all fake edges from the flow graph. If | |
527 | we remove all fake successors, it will automatically remove all | |
528 | fake predecessors. */ | |
529 | ||
530 | void | |
d329e058 | 531 | remove_fake_edges (void) |
402209ff | 532 | { |
e0082a72 | 533 | basic_block bb; |
402209ff | 534 | |
e0082a72 ZD |
535 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) |
536 | remove_fake_successors (bb); | |
402209ff JH |
537 | } |
538 | ||
/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->succ == NULL)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
552 | ||
/* This function adds a fake edge between any infinite loops to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  Each
     block returned by the reverse DFS could not reach the exit, so it
     heads (or lies inside) an infinite loop.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}
589 | \f | |
/* Compute reverse top sort order.  Fills RTS_ORDER with block indices
   in postorder of a DFS from the entry block; the caller must supply
   an array large enough for every reachable block.  */

void
flow_reverse_top_sort_order_compute (int *rts_order)
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}
652 | ||
/* Compute the depth first search order and store in the array
   DFS_ORDER if nonzero, marking the nodes visited in VISITED.  If
   RC_ORDER is nonzero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  Aborts unless exactly n_basic_blocks nodes are visited,
   i.e. the CFG must contain no unreachable blocks.  */

int
flow_depth_first_order_compute (int *dfs_order, int *rc_order)
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  /* RC_ORDER is filled from the back: the last block to complete gets
     the first slot.  */
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum] = dest->index;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
742 | ||
/* A node of the depth-first spanning tree built by
   flow_preorder_transversal_compute.  One node exists per basic
   block, indexed by block index.  */
struct dfst_node
{
  /* Number of children currently stored in NODE.  */
  unsigned nnodes;
  /* Array of pointers to this node's children in the spanning tree.  */
  struct dfst_node **node;
  /* Parent node in the spanning tree.  */
  struct dfst_node *up;
};

/* Compute a preorder transversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is compute by:

   1) Generating a depth first spanning tree.

   2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (int *pot_order)
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = xcalloc (last_basic_block, sizeof (struct dfst_node));

  /* Size each node's child array for the worst case: one child per
     successor edge.  */
  FOR_EACH_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
        max_successors++;

      dfst[bb->index].node
        = (max_successors
           ? xcalloc (max_successors, sizeof (struct dfst_node *)) : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->index].node[dfst[src->index].nnodes++]
                = &dfst[dest->index];
              dfst[dest->index].up = &dfst[src->index];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
866 | ||
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into three pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  If INITIALIZE_STACK is nonzero, the exit block is
   added to the basic block stack.  DATA is the current depth-first
   search context.  If INITIALIZE_STACK is nonzero, there is an
   element on the stack.  */

static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
{
  /* Allocate stack for back-tracking up CFG.  The sizes are biased by
     INVALID_BLOCK + 1 so the entry/exit pseudo-blocks fit as well.  */
  data->stack = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}
913 | ||
914 | /* Add the specified basic block to the top of the dfs data | |
915 | structures. When the search continues, it will start at the | |
916 | block. */ | |
917 | ||
918 | static void | |
d329e058 | 919 | flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb) |
402209ff JH |
920 | { |
921 | data->stack[data->sp++] = bb; | |
0b17ab2f | 922 | SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)); |
402209ff JH |
923 | } |
924 | ||
4891442b RK |
925 | /* Continue the depth-first search through the reverse graph starting with the |
926 | block at the stack's top and ending when the stack is empty. Visited nodes | |
927 | are marked. Returns an unvisited basic block, or NULL if there is none | |
928 | available. */ | |
402209ff JH |
929 | |
930 | static basic_block | |
d329e058 | 931 | flow_dfs_compute_reverse_execute (depth_first_search_ds data) |
402209ff JH |
932 | { |
933 | basic_block bb; | |
934 | edge e; | |
402209ff JH |
935 | |
936 | while (data->sp > 0) | |
937 | { | |
938 | bb = data->stack[--data->sp]; | |
4891442b | 939 | |
db4a8254 JH |
940 | /* Perform depth-first search on adjacent vertices. */ |
941 | for (e = bb->pred; e; e = e->pred_next) | |
942 | if (!TEST_BIT (data->visited_blocks, | |
0b17ab2f | 943 | e->src->index - (INVALID_BLOCK + 1))) |
db4a8254 | 944 | flow_dfs_compute_reverse_add_bb (data, e->src); |
402209ff JH |
945 | } |
946 | ||
947 | /* Determine if there are unvisited basic blocks. */ | |
e0082a72 ZD |
948 | FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb) |
949 | if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1))) | |
950 | return bb; | |
4891442b | 951 | |
402209ff JH |
952 | return NULL; |
953 | } | |
954 | ||
955 | /* Destroy the data structures needed for depth-first search on the | |
956 | reverse graph. */ | |
957 | ||
958 | static void | |
d329e058 | 959 | flow_dfs_compute_reverse_finish (depth_first_search_ds data) |
402209ff JH |
960 | { |
961 | free (data->stack); | |
962 | sbitmap_free (data->visited_blocks); | |
402209ff | 963 | } |
2ecfd709 ZD |
964 | |
965 | /* Performs dfs search from BB over vertices satisfying PREDICATE; | |
966 | if REVERSE, go against direction of edges. Returns number of blocks | |
967 | found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */ | |
968 | int | |
d329e058 AJ |
969 | dfs_enumerate_from (basic_block bb, int reverse, |
970 | bool (*predicate) (basic_block, void *), | |
971 | basic_block *rslt, int rslt_max, void *data) | |
2ecfd709 ZD |
972 | { |
973 | basic_block *st, lbb; | |
974 | int sp = 0, tv = 0; | |
975 | ||
976 | st = xcalloc (rslt_max, sizeof (basic_block)); | |
977 | rslt[tv++] = st[sp++] = bb; | |
978 | bb->flags |= BB_VISITED; | |
979 | while (sp) | |
980 | { | |
981 | edge e; | |
982 | lbb = st[--sp]; | |
983 | if (reverse) | |
984 | { | |
985 | for (e = lbb->pred; e; e = e->pred_next) | |
986 | if (!(e->src->flags & BB_VISITED) && predicate (e->src, data)) | |
987 | { | |
988 | if (tv == rslt_max) | |
989 | abort (); | |
990 | rslt[tv++] = st[sp++] = e->src; | |
991 | e->src->flags |= BB_VISITED; | |
992 | } | |
993 | } | |
994 | else | |
995 | { | |
996 | for (e = lbb->succ; e; e = e->succ_next) | |
997 | if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data)) | |
998 | { | |
999 | if (tv == rslt_max) | |
1000 | abort (); | |
1001 | rslt[tv++] = st[sp++] = e->dest; | |
1002 | e->dest->flags |= BB_VISITED; | |
1003 | } | |
1004 | } | |
1005 | } | |
1006 | free (st); | |
1007 | for (sp = 0; sp < tv; sp++) | |
1008 | rslt[sp]->flags &= ~BB_VISITED; | |
1009 | return tv; | |
1010 | } |