1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains various simple utilities to analyze the CFG. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
28 #include "toplev.h"
29
30 #include "obstack.h"
31
32 /* Store the data structures necessary for depth-first search. */
33 struct depth_first_search_dsS {
34 /* The stack used for backtracking during the search. */
35 basic_block *stack;
36
37 /* The number of basic blocks on the stack. That is, positions 0, ..., sp-1
38 hold blocks. */
39 unsigned int sp;
40
41 /* Record of basic blocks already seen by the depth-first search. */
42 sbitmap visited_blocks;
43 };
44 typedef struct depth_first_search_dsS *depth_first_search_ds;
45
46 static void flow_dfs_compute_reverse_init
47 PARAMS ((depth_first_search_ds));
48 static void flow_dfs_compute_reverse_add_bb
49 PARAMS ((depth_first_search_ds, basic_block));
50 static basic_block flow_dfs_compute_reverse_execute
51 PARAMS ((depth_first_search_ds));
52 static void flow_dfs_compute_reverse_finish
53 PARAMS ((depth_first_search_ds));
54 static void remove_fake_successors PARAMS ((basic_block));
55 static bool need_fake_edge_p PARAMS ((rtx));
56 \f
57 /* Return true if the block has no effect and only forwards control flow to
58 its single destination. */
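/* Concretely (mirroring the checks made below), such a block has exactly one
   successor edge and contains no active insns, except that its final insn may
   be a simple unconditional jump; labels, notes and the like are ignored.  */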
59 bool
60 forwarder_block_p (bb)
61 basic_block bb;
62 {
63 rtx insn = bb->head;
64 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
65 || !bb->succ || bb->succ->succ_next)
66 return false;
67
68 while (insn != bb->end)
69 {
70 if (active_insn_p (insn))
71 return false;
72 insn = NEXT_INSN (insn);
73 }
74 return (!active_insn_p (insn)
75 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
76 }
77
78 /* Return nonzero if we can reach TARGET from SRC by falling through. */
79 bool
80 can_fallthru (src, target)
81 basic_block src, target;
82 {
83 rtx insn = src->end;
84 rtx insn2 = target->head;
85
86 if (src->index + 1 == target->index && !active_insn_p (insn2))
87 insn2 = next_active_insn (insn2);
88 /* ??? Later we may add code to move jump tables offline. */
89 return next_active_insn (insn) == insn2;
90 }
91 \f
92 /* Identify critical edges and set the bits appropriately. */
93
94 void
95 mark_critical_edges ()
96 {
97 int i, n = n_basic_blocks;
98 basic_block bb;
99
100 /* We begin with the entry block. This is not terribly important now,
101 but could be if a front end (Fortran) implemented alternate entry
102 points. */
103 bb = ENTRY_BLOCK_PTR;
104 i = -1;
105
106 while (1)
107 {
108 edge e;
109
110 /* (1) Critical edges must have a source with multiple successors. */
111 if (bb->succ && bb->succ->succ_next)
112 {
113 for (e = bb->succ; e; e = e->succ_next)
114 {
115 /* (2) Critical edges must have a destination with multiple
116 predecessors. Note that we know there is at least one
117 predecessor -- the edge we followed to get here. */
118 if (e->dest->pred->pred_next)
119 e->flags |= EDGE_CRITICAL;
120 else
121 e->flags &= ~EDGE_CRITICAL;
122 }
123 }
124 else
125 {
126 for (e = bb->succ; e; e = e->succ_next)
127 e->flags &= ~EDGE_CRITICAL;
128 }
129
130 if (++i >= n)
131 break;
132 bb = BASIC_BLOCK (i);
133 }
134 }
135
136 /* Mark the back edges in the DFS traversal.
137 Return non-zero if a loop (natural or otherwise) is present.
138 Inspired by Depth_First_Search_PP described in:
139
140 Advanced Compiler Design and Implementation
141 Steven Muchnick
142 Morgan Kaufmann, 1997
143
144 and heavily borrowed from flow_depth_first_order_compute. */
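/* A sketch of the criterion used below: each block receives a preorder number
   when it is first visited and a postorder number once all of its successors
   have been processed.  An edge SRC->DEST is then marked EDGE_DFS_BACK when
   pre[SRC] >= pre[DEST] and post[DEST] is still zero, i.e. DEST was entered
   no later than SRC but has not yet been finished, so DEST is an ancestor of
   SRC on the current search path.  */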
145
146 bool
147 mark_dfs_back_edges ()
148 {
149 edge *stack;
150 int *pre;
151 int *post;
152 int sp;
153 int prenum = 1;
154 int postnum = 1;
155 sbitmap visited;
156 bool found = false;
157
158 /* Allocate the preorder and postorder number arrays. */
159 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
160 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
161
162 /* Allocate stack for back-tracking up CFG. */
163 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
164 sp = 0;
165
166 /* Allocate bitmap to track nodes that have been visited. */
167 visited = sbitmap_alloc (n_basic_blocks);
168
169 /* None of the nodes in the CFG have been visited yet. */
170 sbitmap_zero (visited);
171
172 /* Push the first edge on to the stack. */
173 stack[sp++] = ENTRY_BLOCK_PTR->succ;
174
175 while (sp)
176 {
177 edge e;
178 basic_block src;
179 basic_block dest;
180
181 /* Look at the edge on the top of the stack. */
182 e = stack[sp - 1];
183 src = e->src;
184 dest = e->dest;
185 e->flags &= ~EDGE_DFS_BACK;
186
187 /* Check if the edge destination has been visited yet. */
188 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
189 {
190 /* Mark that we have visited the destination. */
191 SET_BIT (visited, dest->index);
192
193 pre[dest->index] = prenum++;
194
195 if (dest->succ)
196 {
197 /* Since the DEST node has been visited for the first
198 time, check its successors. */
199 stack[sp++] = dest->succ;
200 }
201 else
202 post[dest->index] = postnum++;
203 }
204 else
205 {
206 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
207 && pre[src->index] >= pre[dest->index]
208 && post[dest->index] == 0)
209 e->flags |= EDGE_DFS_BACK, found = true;
210
211 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
212 post[src->index] = postnum++;
213
214 if (e->succ_next)
215 stack[sp - 1] = e->succ_next;
216 else
217 sp--;
218 }
219 }
220
221 free (pre);
222 free (post);
223 free (stack);
224 sbitmap_free (visited);
225
226 return found;
227 }
228
229 /* Return true if we need to add a fake edge to the exit block.
230 Helper function for flow_call_edges_add. */
231
232 static bool
233 need_fake_edge_p (insn)
234 rtx insn;
235 {
236 if (!INSN_P (insn))
237 return false;
238
239 if ((GET_CODE (insn) == CALL_INSN
240 && !SIBLING_CALL_P (insn)
241 && !find_reg_note (insn, REG_NORETURN, NULL)
242 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
243 && !CONST_OR_PURE_CALL_P (insn)))
244 return true;
245
246 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
247 && MEM_VOLATILE_P (PATTERN (insn)))
248 || (GET_CODE (PATTERN (insn)) == PARALLEL
249 && asm_noperands (insn) != -1
250 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
251 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
252 }
253
254 /* Add fake edges to the function exit for any non-constant and non-noreturn
255 calls, and for volatile inline assembly, in the bitmap of blocks specified
256 by BLOCKS, or to the whole CFG if BLOCKS is zero. Return the number of
257 blocks that were split.
258
259 The goal is to expose cases in which entering a basic block does not imply
260 that all subsequent instructions must be executed. */
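/* The EDGE_FAKE edges added here are purely an analysis device: once the
   client (for instance the block profiler mentioned in the comment inside
   this function) is done with them, they can be stripped again with
   remove_fake_edges, defined later in this file.  */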
261
262 int
263 flow_call_edges_add (blocks)
264 sbitmap blocks;
265 {
266 int i;
267 int blocks_split = 0;
268 int bb_num = 0;
269 basic_block *bbs;
270 bool check_last_block = false;
271
272 /* Map bb indices into basic block pointers since split_block
273 will renumber the basic blocks. */
274
275 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
276
277 if (! blocks)
278 {
279 for (i = 0; i < n_basic_blocks; i++)
280 bbs[bb_num++] = BASIC_BLOCK (i);
281 check_last_block = true;
282 }
283 else
284 {
285 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
286 {
287 bbs[bb_num++] = BASIC_BLOCK (i);
288 if (i == n_basic_blocks - 1)
289 check_last_block = true;
290 });
291 }
292
293 /* In the last basic block, before epilogue generation, there will be
294 a fallthru edge to EXIT. Special care is required if the last insn
295 of the last basic block is a call because make_edge folds duplicate
296 edges, which would result in the fallthru edge also being marked
297 fake, which would result in the fallthru edge being removed by
298 remove_fake_edges, which would result in an invalid CFG.
299
300 Moreover, we can't elide the outgoing fake edge, since the block
301 profiler needs to take this into account in order to solve the minimal
302 spanning tree in the case that the call doesn't return.
303
304 Handle this by adding a dummy instruction in a new last basic block. */
305 if (check_last_block
306 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
307 {
308 edge e;
309 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
310 if (e->dest == EXIT_BLOCK_PTR)
311 break;
312 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
313 commit_edge_insertions ();
314 }
315
316
317 /* Now add fake edges to the function exit for any non-constant
318 calls, since there is no way for us to determine whether they will
319 return or not... */
320
321 for (i = 0; i < bb_num; i++)
322 {
323 basic_block bb = bbs[i];
324 rtx insn;
325 rtx prev_insn;
326
327 for (insn = bb->end; ; insn = prev_insn)
328 {
329 prev_insn = PREV_INSN (insn);
330 if (need_fake_edge_p (insn))
331 {
332 edge e;
333
334 /* The above condition should be enough to verify that there is
335 no edge to the exit block in the CFG already. Calling make_edge in
336 such a case would cause us to mark that edge as fake and remove it
337 later. */
338 #ifdef ENABLE_CHECKING
339 if (insn == bb->end)
340 for (e = bb->succ; e; e = e->succ_next)
341 if (e->dest == EXIT_BLOCK_PTR)
342 abort ();
343 #endif
344
345 /* Note that the following may create a new basic block
346 and renumber the existing basic blocks. */
347 e = split_block (bb, insn);
348 if (e)
349 blocks_split++;
350
351 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
352 }
353 if (insn == bb->head)
354 break;
355 }
356 }
357
358 if (blocks_split)
359 verify_flow_info ();
360
361 free (bbs);
362 return blocks_split;
363 }
364 /* Find unreachable blocks. An unreachable block will have its
365 BB_REACHABLE bit clear in block->flags; a set bit indicates the
366 block is reachable. */
367
368 void
369 find_unreachable_blocks ()
370 {
371 edge e;
372 int i, n;
373 basic_block *tos, *worklist;
374
375 n = n_basic_blocks;
376 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
377
378 /* Clear all the reachability flags. */
379
380 for (i = 0; i < n; ++i)
381 BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
382
383 /* Add our starting points to the worklist. Almost always there will
384 be only one. It isn't inconceivable that we might one day directly
385 support Fortran alternate entry points. */
386
387 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
388 {
389 *tos++ = e->dest;
390
391 /* Mark the block reachable. */
392 e->dest->flags |= BB_REACHABLE;
393 }
394
395 /* Iterate: find everything reachable from what we've already seen. */
396
397 while (tos != worklist)
398 {
399 basic_block b = *--tos;
400
401 for (e = b->succ; e; e = e->succ_next)
402 if (!(e->dest->flags & BB_REACHABLE))
403 {
404 *tos++ = e->dest;
405 e->dest->flags |= BB_REACHABLE;
406 }
407 }
408
409 free (worklist);
410 }
411 \f
412 /* Functions to access an edge list with a vector representation.
413 Enough data is kept such that given an index number, the
414 pred and succ that edge represents can be determined, or
415 given a pred and a succ, its index number can be returned.
416 This allows algorithms which would otherwise consume a lot of
417 memory for the normally full matrix of edges (pred,succ) to use a
418 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
419 space wasted in the client code on sparse flow graphs. */
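/* A minimal usage sketch, where BB1 and BB2 stand for arbitrary basic_block
   pointers the caller already has:

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, bb1, bb2);

     if (ix != EDGE_INDEX_NO_EDGE)
       print_edge_list (stderr, elist);
     free_edge_list (elist);  */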
420
421 /* This function initializes the edge list. Basically the entire
422 flowgraph is processed, all edges are assigned a number,
423 and the data structure is filled in. */
424
425 struct edge_list *
426 create_edge_list ()
427 {
428 struct edge_list *elist;
429 edge e;
430 int num_edges;
431 int x;
432 int block_count;
433
434 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
435
436 num_edges = 0;
437
438 /* Determine the number of edges in the flow graph by counting successor
439 edges on each basic block. */
440 for (x = 0; x < n_basic_blocks; x++)
441 {
442 basic_block bb = BASIC_BLOCK (x);
443
444 for (e = bb->succ; e; e = e->succ_next)
445 num_edges++;
446 }
447 /* Don't forget successors of the entry block. */
448 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
449 num_edges++;
450
451 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
452 elist->num_blocks = block_count;
453 elist->num_edges = num_edges;
454 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
455
456 num_edges = 0;
457
458 /* Follow successors of the entry block, and register these edges. */
459 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
460 {
461 elist->index_to_edge[num_edges] = e;
462 num_edges++;
463 }
464
465 for (x = 0; x < n_basic_blocks; x++)
466 {
467 basic_block bb = BASIC_BLOCK (x);
468
469 /* Follow all successors of blocks, and register these edges. */
470 for (e = bb->succ; e; e = e->succ_next)
471 {
472 elist->index_to_edge[num_edges] = e;
473 num_edges++;
474 }
475 }
476 return elist;
477 }
478
479 /* This function frees the memory associated with an edge list. */
480
481 void
482 free_edge_list (elist)
483 struct edge_list *elist;
484 {
485 if (elist)
486 {
487 free (elist->index_to_edge);
488 free (elist);
489 }
490 }
491
492 /* This function provides debug output showing an edge list. */
493
494 void
495 print_edge_list (f, elist)
496 FILE *f;
497 struct edge_list *elist;
498 {
499 int x;
500 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
501 elist->num_blocks - 2, elist->num_edges);
502
503 for (x = 0; x < elist->num_edges; x++)
504 {
505 fprintf (f, " %-4d - edge(", x);
506 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
507 fprintf (f, "entry,");
508 else
509 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
510
511 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
512 fprintf (f, "exit)\n");
513 else
514 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
515 }
516 }
517
518 /* This function provides an internal consistency check of an edge list,
519 verifying that all edges are present, and that there are no
520 extra edges. */
521
522 void
523 verify_edge_list (f, elist)
524 FILE *f;
525 struct edge_list *elist;
526 {
527 int x, pred, succ, index;
528 edge e;
529
530 for (x = 0; x < n_basic_blocks; x++)
531 {
532 basic_block bb = BASIC_BLOCK (x);
533
534 for (e = bb->succ; e; e = e->succ_next)
535 {
536 pred = e->src->index;
537 succ = e->dest->index;
538 index = EDGE_INDEX (elist, e->src, e->dest);
539 if (index == EDGE_INDEX_NO_EDGE)
540 {
541 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
542 continue;
543 }
544 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
545 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
546 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
547 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
548 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
549 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
550 }
551 }
552 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
553 {
554 pred = e->src->index;
555 succ = e->dest->index;
556 index = EDGE_INDEX (elist, e->src, e->dest);
557 if (index == EDGE_INDEX_NO_EDGE)
558 {
559 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
560 continue;
561 }
562 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
563 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
564 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
565 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
566 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
567 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
568 }
569 /* We've verified that all the edges are in the list; now let's make
570 sure there are no spurious edges in the list. */
571
572 for (pred = 0; pred < n_basic_blocks; pred++)
573 for (succ = 0; succ < n_basic_blocks; succ++)
574 {
575 basic_block p = BASIC_BLOCK (pred);
576 basic_block s = BASIC_BLOCK (succ);
577
578 int found_edge = 0;
579
580 for (e = p->succ; e; e = e->succ_next)
581 if (e->dest == s)
582 {
583 found_edge = 1;
584 break;
585 }
586 for (e = s->pred; e; e = e->pred_next)
587 if (e->src == p)
588 {
589 found_edge = 1;
590 break;
591 }
592 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
593 == EDGE_INDEX_NO_EDGE && found_edge != 0)
594 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
595 pred, succ);
596 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
597 != EDGE_INDEX_NO_EDGE && found_edge == 0)
598 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
599 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
600 BASIC_BLOCK (succ)));
601 }
602 for (succ = 0; succ < n_basic_blocks; succ++)
603 {
604 basic_block p = ENTRY_BLOCK_PTR;
605 basic_block s = BASIC_BLOCK (succ);
606
607 int found_edge = 0;
608
609 for (e = p->succ; e; e = e->succ_next)
610 if (e->dest == s)
611 {
612 found_edge = 1;
613 break;
614 }
615 for (e = s->pred; e; e = e->pred_next)
616 if (e->src == p)
617 {
618 found_edge = 1;
619 break;
620 }
621 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
622 == EDGE_INDEX_NO_EDGE && found_edge != 0)
623 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
624 succ);
625 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
626 != EDGE_INDEX_NO_EDGE && found_edge == 0)
627 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
628 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
629 BASIC_BLOCK (succ)));
630 }
631 for (pred = 0; pred < n_basic_blocks; pred++)
632 {
633 basic_block p = BASIC_BLOCK (pred);
634 basic_block s = EXIT_BLOCK_PTR;
635
636 int found_edge = 0;
637
638 for (e = p->succ; e; e = e->succ_next)
639 if (e->dest == s)
640 {
641 found_edge = 1;
642 break;
643 }
644 for (e = s->pred; e; e = e->pred_next)
645 if (e->src == p)
646 {
647 found_edge = 1;
648 break;
649 }
650 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
651 == EDGE_INDEX_NO_EDGE && found_edge != 0)
652 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
653 pred);
654 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
655 != EDGE_INDEX_NO_EDGE && found_edge == 0)
656 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
657 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
658 EXIT_BLOCK_PTR));
659 }
660 }
661
662 /* This routine will determine what, if any, edge there is between
663 a specified predecessor and successor. */
664
665 int
666 find_edge_index (edge_list, pred, succ)
667 struct edge_list *edge_list;
668 basic_block pred, succ;
669 {
670 int x;
671 for (x = 0; x < NUM_EDGES (edge_list); x++)
672 {
673 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
674 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
675 return x;
676 }
677 return (EDGE_INDEX_NO_EDGE);
678 }
679
680 /* Dump the list of basic blocks in the bitmap NODES. */
681
682 void
683 flow_nodes_print (str, nodes, file)
684 const char *str;
685 const sbitmap nodes;
686 FILE *file;
687 {
688 int node;
689
690 if (! nodes)
691 return;
692
693 fprintf (file, "%s { ", str);
694 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
695 fputs ("}\n", file);
696 }
697
698 /* Dump the list of edges in the array EDGE_LIST. */
699
700 void
701 flow_edge_list_print (str, edge_list, num_edges, file)
702 const char *str;
703 const edge *edge_list;
704 int num_edges;
705 FILE *file;
706 {
707 int i;
708
709 if (! edge_list)
710 return;
711
712 fprintf (file, "%s { ", str);
713 for (i = 0; i < num_edges; i++)
714 fprintf (file, "%d->%d ", edge_list[i]->src->index,
715 edge_list[i]->dest->index);
716 fputs ("}\n", file);
717 }
718
719 \f
720 /* This routine will remove any fake successor edges for a basic block.
721 When the edge is removed, it is also removed from whatever predecessor
722 list it is in. */
723
724 static void
725 remove_fake_successors (bb)
726 basic_block bb;
727 {
728 edge e;
729 for (e = bb->succ; e;)
730 {
731 edge tmp = e;
732 e = e->succ_next;
733 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
734 remove_edge (tmp);
735 }
736 }
737
738 /* This routine will remove all fake edges from the flow graph. If
739 we remove all fake successors, it will automatically remove all
740 fake predecessors. */
741
742 void
743 remove_fake_edges ()
744 {
745 int x;
746
747 for (x = 0; x < n_basic_blocks; x++)
748 remove_fake_successors (BASIC_BLOCK (x));
749
750 /* We've handled all successors except the entry block's. */
751 remove_fake_successors (ENTRY_BLOCK_PTR);
752 }
753
754 /* This function will add a fake edge between any block which has no
755 successors, and the exit block. Some data flow equations require these
756 edges to exist. */
757
758 void
759 add_noreturn_fake_exit_edges ()
760 {
761 int x;
762
763 for (x = 0; x < n_basic_blocks; x++)
764 if (BASIC_BLOCK (x)->succ == NULL)
765 make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
766 }
767
768 /* This function adds a fake edge from each infinite loop to the
769 exit block. Some optimizations require a path from each node to
770 the exit node.
771
772 See also Morgan, Figure 3.10, pp. 82-83.
773
774 The current implementation is ugly, not attempting to minimize the
775 number of inserted fake edges. To reduce the number of fake edges
776 to insert, add fake edges from _innermost_ loops containing only
777 nodes not reachable from the exit block. */
778
779 void
780 connect_infinite_loops_to_exit ()
781 {
782 basic_block unvisited_block;
783
784 /* Perform depth-first search in the reverse graph to find nodes
785 reachable from the exit block. */
786 struct depth_first_search_dsS dfs_ds;
787
788 flow_dfs_compute_reverse_init (&dfs_ds);
789 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
790
791 /* Repeatedly add fake edges, updating the unreachable nodes. */
792 while (1)
793 {
794 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
795 if (!unvisited_block)
796 break;
797 make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
798 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
799 }
800
801 flow_dfs_compute_reverse_finish (&dfs_ds);
802
803 return;
804 }
805 \f
806 /* Compute the reverse topological sort order. */
807 void
808 flow_reverse_top_sort_order_compute (rts_order)
809 int *rts_order;
810 {
811 edge *stack;
812 int sp;
813 int postnum = 0;
814 sbitmap visited;
815
816 /* Allocate stack for back-tracking up CFG. */
817 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
818 sp = 0;
819
820 /* Allocate bitmap to track nodes that have been visited. */
821 visited = sbitmap_alloc (n_basic_blocks);
822
823 /* None of the nodes in the CFG have been visited yet. */
824 sbitmap_zero (visited);
825
826 /* Push the first edge on to the stack. */
827 stack[sp++] = ENTRY_BLOCK_PTR->succ;
828
829 while (sp)
830 {
831 edge e;
832 basic_block src;
833 basic_block dest;
834
835 /* Look at the edge on the top of the stack. */
836 e = stack[sp - 1];
837 src = e->src;
838 dest = e->dest;
839
840 /* Check if the edge destination has been visited yet. */
841 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
842 {
843 /* Mark that we have visited the destination. */
844 SET_BIT (visited, dest->index);
845
846 if (dest->succ)
847 {
848 /* Since the DEST node has been visited for the first
849 time, check its successors. */
850 stack[sp++] = dest->succ;
851 }
852 else
853 rts_order[postnum++] = dest->index;
854 }
855 else
856 {
857 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
858 rts_order[postnum++] = src->index;
859
860 if (e->succ_next)
861 stack[sp - 1] = e->succ_next;
862 else
863 sp--;
864 }
865 }
866
867 free (stack);
868 sbitmap_free (visited);
869 }
870
871 /* Compute the depth first search order and store it in the array
872 DFS_ORDER if DFS_ORDER is non-zero, marking visited nodes in a
873 local bitmap. If RC_ORDER is non-zero, store the blocks there in
874 reverse completion order. Returns the number of nodes visited. A
875 depth first search tries to get as far away from the starting point
876 as quickly as possible. */
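/* Note that the reverse completion order is a reverse postorder of the flow
   graph; ignoring back edges this is a topological order, which is typically
   the order in which forward dataflow problems want to visit the blocks.  */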
877
878 int
879 flow_depth_first_order_compute (dfs_order, rc_order)
880 int *dfs_order;
881 int *rc_order;
882 {
883 edge *stack;
884 int sp;
885 int dfsnum = 0;
886 int rcnum = n_basic_blocks - 1;
887 sbitmap visited;
888
889 /* Allocate stack for back-tracking up CFG. */
890 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
891 sp = 0;
892
893 /* Allocate bitmap to track nodes that have been visited. */
894 visited = sbitmap_alloc (n_basic_blocks);
895
896 /* None of the nodes in the CFG have been visited yet. */
897 sbitmap_zero (visited);
898
899 /* Push the first edge on to the stack. */
900 stack[sp++] = ENTRY_BLOCK_PTR->succ;
901
902 while (sp)
903 {
904 edge e;
905 basic_block src;
906 basic_block dest;
907
908 /* Look at the edge on the top of the stack. */
909 e = stack[sp - 1];
910 src = e->src;
911 dest = e->dest;
912
913 /* Check if the edge destination has been visited yet. */
914 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
915 {
916 /* Mark that we have visited the destination. */
917 SET_BIT (visited, dest->index);
918
919 if (dfs_order)
920 dfs_order[dfsnum++] = dest->index;
921
922 if (dest->succ)
923 {
924 /* Since the DEST node has been visited for the first
925 time, check its successors. */
926 stack[sp++] = dest->succ;
927 }
928 else
929 {
930 /* There are no successors for the DEST node so assign
931 its reverse completion number. */
932 if (rc_order)
933 rc_order[rcnum--] = dest->index;
934 }
935 }
936 else
937 {
938 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
939 {
940 /* There are no more successors for the SRC node
941 so assign its reverse completion number. */
942 if (rc_order)
943 rc_order[rcnum--] = src->index;
944 }
945
946 if (e->succ_next)
947 stack[sp - 1] = e->succ_next;
948 else
949 sp--;
950 }
951 }
952
953 free (stack);
954 sbitmap_free (visited);
955
956 /* The number of nodes visited should not be greater than
957 n_basic_blocks. */
958 if (dfsnum > n_basic_blocks)
959 abort ();
960
961 /* There are some nodes left in the CFG that are unreachable. */
962 if (dfsnum < n_basic_blocks)
963 abort ();
964 return dfsnum;
965 }
966
967 /* Perform a depth first search on the _reverse_ graph, marking the
968 nodes visited in a bitmap kept inside the search context, and
969 reporting blocks that the search did not reach.
970
971 The computation is split into four pieces:
972
973 flow_dfs_compute_reverse_init () creates the necessary data
974 structures.
975
976 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
977 structures. The block will start the search.
978
979 flow_dfs_compute_reverse_execute () continues (or starts) the
980 search using the block on the top of the stack, stopping when the
981 stack is empty.
982
983 flow_dfs_compute_reverse_finish () destroys the necessary data
984 structures.
985
986 Thus, the user will probably call ..._init(), call ..._add_bb() to
987 add a beginning basic block to the stack, call ..._execute(),
988 possibly add another bb to the stack and again call ..._execute(),
989 ..., and finally call _finish(). */
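/* A minimal sketch of that calling sequence (connect_infinite_loops_to_exit
   above follows this pattern, adding a fake edge before each re-seed):

     struct depth_first_search_dsS dfs_ds;
     basic_block bb;

     flow_dfs_compute_reverse_init (&dfs_ds);
     flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
     while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
       flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
     flow_dfs_compute_reverse_finish (&dfs_ds);  */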
990
991 /* Initialize the data structures used for depth-first search on the
992 reverse graph. DATA is the depth-first search context to set up.
993 The backtracking stack starts out empty and none of the blocks are
994 marked as visited; callers seed the search by adding starting
995 blocks with flow_dfs_compute_reverse_add_bb. */
996
997 static void
998 flow_dfs_compute_reverse_init (data)
999 depth_first_search_ds data;
1000 {
1001 /* Allocate stack for back-tracking up CFG. */
1002 data->stack =
1003 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1004 * sizeof (basic_block));
1005 data->sp = 0;
1006
1007 /* Allocate bitmap to track nodes that have been visited. */
1008 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
1009
1010 /* None of the nodes in the CFG have been visited yet. */
1011 sbitmap_zero (data->visited_blocks);
1012
1013 return;
1014 }
1015
1016 /* Add the specified basic block to the top of the dfs data
1017 structures. When the search continues, it will start at the
1018 block. */
1019
1020 static void
1021 flow_dfs_compute_reverse_add_bb (data, bb)
1022 depth_first_search_ds data;
1023 basic_block bb;
1024 {
1025 data->stack[data->sp++] = bb;
1026 return;
1027 }
1028
1029 /* Continue the depth-first search through the reverse graph starting
1030 with the block at the stack's top and ending when the stack is
1031 empty. Visited nodes are marked. Returns an unvisited basic
1032 block, or NULL if there is none available. */
1033
1034 static basic_block
1035 flow_dfs_compute_reverse_execute (data)
1036 depth_first_search_ds data;
1037 {
1038 basic_block bb;
1039 edge e;
1040 int i;
1041
1042 while (data->sp > 0)
1043 {
1044 bb = data->stack[--data->sp];
1045
1046 /* Mark that we have visited this node. */
1047 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
1048 {
1049 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1050
1051 /* Perform depth-first search on adjacent vertices. */
1052 for (e = bb->pred; e; e = e->pred_next)
1053 flow_dfs_compute_reverse_add_bb (data, e->src);
1054 }
1055 }
1056
1057 /* Determine if there are unvisited basic blocks. */
1058 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
1059 if (!TEST_BIT (data->visited_blocks, i))
1060 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
1061 return NULL;
1062 }
1063
1064 /* Destroy the data structures needed for depth-first search on the
1065 reverse graph. */
1066
1067 static void
1068 flow_dfs_compute_reverse_finish (data)
1069 depth_first_search_ds data;
1070 {
1071 free (data->stack);
1072 sbitmap_free (data->visited_blocks);
1073 return;
1074 }