/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "obstack.h"
#include "tm_p.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of basic blocks on the stack.  That is, positions 0, ...,
     sp-1 hold blocks.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors PARAMS ((basic_block));
static bool need_fake_edge_p PARAMS ((rtx));
\f
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !active_insn_p (insn));
}

/* Return true if we can reach TARGET from SRC by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->next_bb != target)
    return false;

  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
\f
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (last_basic_block, sizeof (int));
  post = (int *) xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((num_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->sindex))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->sindex);

          pre[dest->sindex] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->sindex] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->sindex] >= pre[dest->sindex]
              && post[dest->sindex] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->sindex] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
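
/* A usage sketch (illustrative only, hence not compiled): a pass that
   needs the loop back edges can call mark_dfs_back_edges and then test
   EDGE_DFS_BACK on each successor edge.  The function name and FILE
   argument are hypothetical.  */
#if 0
static void
example_dump_back_edges (file)
     FILE *file;
{
  basic_block bb;
  edge e;

  if (mark_dfs_back_edges ())
    FOR_ALL_BB (bb)
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_DFS_BACK)
          fprintf (file, "back edge: %d -> %d\n",
                   e->src->sindex, e->dest->sindex);
}
#endif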

/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag ()
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      edge e;

      /* A FALLTHRU edge is also a CAN_FALLTHRU edge.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e->flags |= EDGE_CAN_FALLTHRU;

      /* If the BB ends with an invertible condjump, both of its edges
         are CAN_FALLTHRU edges.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
        continue;
      if (!any_condjump_p (bb->end))
        continue;
      if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
        continue;

      /* The invert_jump above succeeded and inverted the jump; invert
         it again to restore the original sense before marking.  */
      invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN
      && !SIBLING_CALL_P (insn)
      && !find_reg_note (insn, REG_NORETURN, NULL)
      && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
      && !CONST_OR_PURE_CALL_P (insn))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls, and for volatile inline assembly, in the bitmap of
   blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
   Return the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (num_basic_blocks == 0)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->sindex);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          /* The last block is known to have a fallthru edge to EXIT;
             find it so the dummy insn lands on that edge.  */
          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == EXIT_BLOCK_PTR)
              break;

          insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
          commit_edge_insertions ();
        }
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (GET_CODE (insn) == CALL_INSN)
                while (split_at_insn != bb->end
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in the CFG already.  Calling make_edge in such a case
                 would cause us to mark that edge as fake and remove it
                 later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != bb->end)
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
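
/* A usage sketch (illustrative only, hence not compiled): a profiling
   pass would typically instrument every block by passing a null BLOCKS
   bitmap and later strip the fake edges again.  The surrounding pass
   context and the instrumentation step are hypothetical.  */
#if 0
  int n_split = flow_call_edges_add (NULL);

  /* ... put counters on the edges; the fake edges to EXIT keep blocks
     that end in noreturn calls on some ENTRY-to-EXIT path ...  */

  remove_fake_edges ();
#endif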

/* Find the unreachable blocks.  After this returns, a block is
   reachable iff the BB_REACHABLE bit is set in block->flags.  */

void
find_unreachable_blocks ()
{
  edge e;
  basic_block *tos, *worklist, bb;

  tos = worklist =
        (basic_block *) xmalloc (sizeof (basic_block) * num_basic_blocks);

  /* Clear all the reachability flags.  */

  FOR_ALL_BB (bb)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}
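
/* A usage sketch (illustrative only, hence not compiled): after
   find_unreachable_blocks returns, the dead blocks are exactly those
   with BB_REACHABLE clear.  example_count_unreachable is
   hypothetical.  */
#if 0
static int
example_count_unreachable ()
{
  basic_block bb;
  int count = 0;

  find_unreachable_blocks ();
  FOR_ALL_BB (bb)
    if (!(bb->flags & BB_REACHABLE))
      count++;

  return count;
}
#endif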
\f
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int block_count;
  basic_block bb;

  block_count = num_basic_blocks + 2;   /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    for (e = bb->succ; e; e = e->succ_next)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
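
/* A usage sketch (illustrative only, hence not compiled): a client
   builds the list once, maps a (pred, succ) pair to its dense index
   with EDGE_INDEX, and sizes per-edge arrays with NUM_EDGES.  The
   variables some_pred and some_succ are hypothetical.  */
#if 0
  struct edge_list *elist = create_edge_list ();
  int *edge_data = (int *) xcalloc (NUM_EDGES (elist), sizeof (int));
  int ix = EDGE_INDEX (elist, some_pred, some_succ);

  if (ix != EDGE_INDEX_NO_EDGE)
    edge_data[ix]++;            /* one slot per CFG edge, no waste */

  free (edge_data);
  free_edge_list (elist);
#endif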

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->sindex);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->sindex);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int index, pred, succ;
  edge e;
  basic_block bb, p, s;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->sindex;
          succ = e->dest->sindex;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->sindex != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->sindex);
          if (INDEX_EDGE_SUCC_BB (elist, index)->sindex != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->sindex);
        }
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s)
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->sindex, s->sindex);
        if (EDGE_INDEX (elist, p, s)
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->sindex, s->sindex, EDGE_INDEX (elist, p, s));
      }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->sindex,
             edge_list[i]->dest->sindex);

  fputs ("}\n", file);
}

\f
/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    remove_fake_successors (bb);
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  basic_block bb;

  FOR_ALL_BB (bb)
    if (bb->succ == NULL)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
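
/* A usage sketch (illustrative only, hence not compiled): the usual
   discipline for a dataflow problem that needs every block to lie on
   a path to EXIT.  solve_my_dataflow is a hypothetical stand-in for
   the client's solver.  */
#if 0
  add_noreturn_fake_exit_edges ();
  solve_my_dataflow ();         /* every block now reaches EXIT */
  remove_fake_edges ();
#endif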

/* This function adds a fake edge from any infinite loop to the exit
   block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}
\f
/* Compute the reverse topological sort order, storing the block
   indices in RTS_ORDER.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((num_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->sindex))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->sindex);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->sindex;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->sindex;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}

/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = num_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((num_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->sindex))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->sindex);

          if (dfs_order)
            dfs_order[dfsnum] = dest->sindex;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->sindex;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->sindex;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     num_basic_blocks.  */
  if (dfsnum > num_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < num_basic_blocks)
    abort ();

  return dfsnum;
}
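
/* A usage sketch (illustrative only, hence not compiled): both output
   arrays are sized by num_basic_blocks; rc_order is the reverse
   completion (reverse postorder) numbering, which is a topological
   order once back edges are ignored.  */
#if 0
  int *dfs_order = (int *) xmalloc (num_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (num_basic_blocks * sizeof (int));
  int n = flow_depth_first_order_compute (dfs_order, rc_order);

  /* dfs_order[0 .. n-1] holds block sindex values in discovery order,
     rc_order[0 .. n-1] in reverse completion order.  */

  free (dfs_order);
  free (rc_order);
#endif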

struct dfst_node
{
  unsigned nnodes;
  struct dfst_node **node;
  struct dfst_node *up;
};

/* Compute a preorder transversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

     1) Generating a depth first spanning tree.

     2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((num_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (last_basic_block,
                                       sizeof (struct dfst_node));

  FOR_ALL_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
        max_successors++;

      if (max_successors)
        dfst[bb->sindex].node
          = (struct dfst_node **) xcalloc (max_successors,
                                           sizeof (struct dfst_node *));
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->sindex))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->sindex);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->sindex].node[dfst[src->sindex].nnodes++]
                = &dfst[dest->sindex];
              dfst[dest->sindex].up = &dfst[src->sindex];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->sindex];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
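
/* A usage sketch (illustrative only, hence not compiled): the caller
   supplies an array of num_basic_blocks ints; on return POT_ORDER
   lists block sindex values with the source sub-tree of each cross
   edge before its destination sub-tree.  */
#if 0
  int *pot_order = (int *) xmalloc (num_basic_blocks * sizeof (int));

  flow_preorder_transversal_compute (pot_order);
  /* ... scan pot_order to find the entry blocks of loops ...  */
  free (pot_order);
#endif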

/* Perform a depth first search on the _reverse_ graph, marking the
   blocks visited in the search.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  connect_infinite_loops_to_exit
   above follows exactly this pattern.  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the current depth-first search context.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = (basic_block *) xmalloc ((num_basic_blocks - (INVALID_BLOCK + 1))
                                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->sindex - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (!TEST_BIT (data->visited_blocks,
                       e->src->sindex - (INVALID_BLOCK + 1)))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (!TEST_BIT (data->visited_blocks, bb->sindex - (INVALID_BLOCK + 1)))
      return bb;

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}