/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains various simple utilities to analyze the CFG.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "obstack.h"
#include "tm_p.h"

/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of basic blocks in the stack.  That is, positions 0, ...,
     sp-1 hold blocks.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;

static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void remove_fake_successors PARAMS ((basic_block));
static bool need_fake_edge_p PARAMS ((rtx));
\f
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
          || !active_insn_p (insn));
}
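
/* Editorial sketch, not part of the original file: one plausible use of
   forwarder_block_p.  The function name is hypothetical.  */
#if 0
static int
count_forwarder_blocks ()
{
  int i, count = 0;

  for (i = 0; i < n_basic_blocks; i++)
    if (forwarder_block_p (BASIC_BLOCK (i)))
      count++;

  return count;
}
#endif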

/* Return nonzero if we can reach target from src by falling through.  */

bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 != target->index)
    return 0;

  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
\f
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          pre[dest->index] = prenum++;
          if (dest->succ)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack[sp++] = dest->succ;
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            e->flags |= EDGE_DFS_BACK, found = true;

          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            post[src->index] = postnum++;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
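
/* Editorial sketch, not part of the original file: how a pass might
   consume the EDGE_DFS_BACK marks left by mark_dfs_back_edges.  The
   function name is hypothetical.  */
#if 0
static int
count_back_edges ()
{
  int i, count = 0;
  edge e;

  if (mark_dfs_back_edges ())
    for (i = 0; i < n_basic_blocks; i++)
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
        if (e->flags & EDGE_DFS_BACK)
          count++;

  return count;
}
#endif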

/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */

void
set_edge_can_fallthru_flag ()
{
  int i;
  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      edge e;

      /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e->flags |= EDGE_CAN_FALLTHRU;

      /* If the BB ends with an invertible condjump, both of its edges are
         CAN_FALLTHRU edges.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
        continue;
      if (!any_condjump_p (bb->end))
        continue;
      if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
        continue;
      invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for flow_call_edges_add.  */

static bool
need_fake_edge_p (insn)
     rtx insn;
{
  if (!INSN_P (insn))
    return false;

  if ((GET_CODE (insn) == CALL_INSN
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
           && MEM_VOLATILE_P (PATTERN (insn)))
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && asm_noperands (insn) != -1
              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, and for volatile inline assembly, in the bitmap of blocks specified
   by BLOCKS, or to the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

int
flow_call_edges_add (blocks)
     sbitmap blocks;
{
  int i;
  int blocks_split = 0;
  int bb_num = 0;
  basic_block *bbs;
  bool check_last_block = false;

  /* Map bb indices into basic block pointers since split_block
     will renumber the basic blocks.  */

  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));

  if (! blocks)
    {
      for (i = 0; i < n_basic_blocks; i++)
        bbs[bb_num++] = BASIC_BLOCK (i);

      check_last_block = true;
    }
  else
    EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
                               {
                                 bbs[bb_num++] = BASIC_BLOCK (i);
                                 if (i == n_basic_blocks - 1)
                                   check_last_block = true;
                               });

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == EXIT_BLOCK_PTR)
              break;

          insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
          commit_edge_insertions ();
        }
    }

  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way to determine whether they will
     return or not.  */

  for (i = 0; i < bb_num; i++)
    {
      basic_block bb = bbs[i];
      rtx insn;
      rtx prev_insn;

      for (insn = bb->end; ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (GET_CODE (insn) == CALL_INSN)
                while (split_at_insn != bb->end
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in CFG already.  Calling make_edge in such case would
                 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == bb->end)
                for (e = bb->succ; e; e = e->succ_next)
                  if (e->dest == EXIT_BLOCK_PTR)
                    abort ();
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != bb->end)
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == bb->head)
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  free (bbs);
  return blocks_split;
}
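
/* Editorial sketch, not part of the original file: the typical lifecycle
   of the fake call edges.  Passing a null sbitmap covers the whole CFG;
   the elided analysis step stands in for any pass that must assume a
   call may not return.  */
#if 0
static void
example_fake_call_edges ()
{
  int blocks_split = flow_call_edges_add (NULL);

  /* ... run an analysis that must not assume calls return ...  */

  remove_fake_edges ();
  if (blocks_split)
    verify_flow_info ();
}
#endif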

/* Find unreachable blocks.  An unreachable block will have the
   BB_REACHABLE bit clear in block->flags; a set bit indicates the
   block is reachable.  */

void
find_unreachable_blocks ()
{
  edge e;
  int i, n;
  basic_block *tos, *worklist;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Clear all the reachability flags.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e; e = e->succ_next)
        if (!(e->dest->flags & BB_REACHABLE))
          {
            *tos++ = e->dest;
            e->dest->flags |= BB_REACHABLE;
          }
    }

  free (worklist);
}
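
/* Editorial sketch, not part of the original file: after
   find_unreachable_blocks, the BB_REACHABLE bit separates live blocks
   from dead ones.  The function name is hypothetical.  */
#if 0
static void
dump_unreachable_blocks (file)
     FILE *file;
{
  int i;

  find_unreachable_blocks ();
  for (i = 0; i < n_basic_blocks; i++)
    if (!(BASIC_BLOCK (i)->flags & BB_REACHABLE))
      fprintf (file, ";; block %d is unreachable\n", i);
}
#endif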
\f
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */
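
/* Editorial sketch, not part of the original file: the usual lifecycle
   of an edge list.  NUM_EDGES, EDGE_INDEX and INDEX_EDGE_PRED_BB are
   the accessor macros from basic-block.h.  */
#if 0
static void
example_edge_list (file)
     FILE *file;
{
  struct edge_list *elist = create_edge_list ();
  int x;

  verify_edge_list (file, elist);
  for (x = 0; x < NUM_EDGES (elist); x++)
    if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
      fprintf (file, ";; edge %d leaves the entry block\n", x);

  free_edge_list (elist);
}
#endif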

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list ()
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  int x;
  int block_count;

  block_count = n_basic_blocks + 2;  /* Include the entry and exit blocks.  */

  num_edges = 0;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        num_edges++;
    }

  /* Don't forget successors of the entry block.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    num_edges++;

  elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
  elist->num_blocks = block_count;
  elist->num_edges = num_edges;
  elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);

  num_edges = 0;

  /* Follow successors of the entry block, and register these edges.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    elist->index_to_edge[num_edges++] = e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      /* Follow all successors of blocks, and register these edges.  */
      for (e = bb->succ; e; e = e->succ_next)
        elist->index_to_edge[num_edges++] = e;
    }

  return elist;
}

/* This function frees memory associated with an edge list.  */

void
free_edge_list (elist)
     struct edge_list *elist;
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

void
print_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           elist->num_blocks - 2, elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
        {
          fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
          continue;
        }

      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
        fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
        fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }

  /* We've verified that all the edges are in the list; now let's make sure
     there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
        basic_block p = BASIC_BLOCK (pred);
        basic_block s = BASIC_BLOCK (succ);
        int found_edge = 0;

        for (e = p->succ; e; e = e->succ_next)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        for (e = s->pred; e; e = e->pred_next)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   pred, succ);
        if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                           BASIC_BLOCK (succ)));
      }

  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }

      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }

      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
                 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
                 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
                                   BASIC_BLOCK (succ)));
    }

  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;
      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
        if (e->dest == s)
          {
            found_edge = 1;
            break;
          }

      for (e = s->pred; e; e = e->pred_next)
        if (e->src == p)
          {
            found_edge = 1;
            break;
          }

      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          == EDGE_INDEX_NO_EDGE && found_edge != 0)
        fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
                 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
          != EDGE_INDEX_NO_EDGE && found_edge == 0)
        fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
                 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
                                   EXIT_BLOCK_PTR));
    }
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (edge_list, pred, succ)
     struct edge_list *edge_list;
     basic_block pred, succ;
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);

  fputs ("}\n", file);
}

\f
/* This routine will remove any fake successor edges for a basic block.
   When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */

static void
remove_fake_successors (bb)
     basic_block bb;
{
  edge e;

  for (e = bb->succ; e;)
    {
      edge tmp = e;

      e = e->succ_next;
      if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (tmp);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    remove_fake_successors (BASIC_BLOCK (x));

  /* We've handled all successors except the entry block's.  */
  remove_fake_successors (ENTRY_BLOCK_PTR);
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges ()
{
  int x;

  for (x = 0; x < n_basic_blocks; x++)
    if (BASIC_BLOCK (x)->succ == NULL)
      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
}

/* This function adds a fake edge from any infinite loop to the
   exit block.  Some optimizations require a path from each node to
   the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit ()
{
  basic_block unvisited_block;
  struct depth_first_search_dsS dfs_ds;

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  while (1)
    {
      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
      if (!unvisited_block)
        break;

      make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
      flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
    }

  flow_dfs_compute_reverse_finish (&dfs_ds);
  return;
}
\f
/* Compute reverse top sort order.  */

void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;
{
  edge *stack;
  int sp;
  int postnum = 0;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else
            rts_order[postnum++] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR)
            rts_order[postnum++] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);
}

/* Compute the depth first search order and store it in the array
   DFS_ORDER if non-zero.  If RC_ORDER is non-zero, store the reverse
   completion number of each node in it.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the
   starting point as quickly as possible.  */

int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum] = dest->index;

          dfsnum++;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR
              && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
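
/* Editorial sketch, not part of the original file: the caller-side
   allocation expected by flow_depth_first_order_compute.  The same
   pattern applies to flow_reverse_top_sort_order_compute above and
   flow_preorder_transversal_compute below.  */
#if 0
static void
example_dfs_orders ()
{
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  flow_depth_first_order_compute (dfs_order, rc_order);

  /* ... consume dfs_order[0 .. n_basic_blocks - 1] and rc_order ...  */

  free (dfs_order);
  free (rc_order);
}
#endif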

struct dfst_node
{
  unsigned nnodes;
  struct dfst_node **node;
  struct dfst_node *up;
};

/* Compute a preorder traversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

   1) Generating a depth first spanning tree.

   2) Walking the resulting tree from right to left.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
                                       sizeof (struct dfst_node));

  for (i = 0; i < n_basic_blocks; i++)
    {
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
        max_successors++;

      dfst[i].node
        = (max_successors
           ? (struct dfst_node **) xcalloc (max_successors,
                                            sizeof (struct dfst_node *))
           : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          /* Add the destination to the preorder tree.  */
          if (src != ENTRY_BLOCK_PTR)
            {
              dfst[src->index].node[dfst[src->index].nnodes++]
                = &dfst[dest->index];
              dfst[dest->index].up = &dfst[src->index];
            }

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
        }

      else if (e->succ_next)
        stack[sp - 1] = e->succ_next;
      else
        sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder traversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
        {
          node = node->node[--node->nnodes];
          pot_order[i++] = node - dfst;
        }
      else
        node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}

/* Perform a depth-first search on the _reverse_ graph, marking the
   nodes visited as it goes.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
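
/* Editorial sketch, not part of the original file: the call sequence
   described above, shaped like connect_infinite_loops_to_exit but
   without the fake-edge insertion.  */
#if 0
static void
example_reverse_dfs ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each iteration returns a block that cannot reach the exit;
     restart the search from it until every block has been seen.  */
  while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif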

/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to fill in;
   its stack starts out empty.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                                         * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);

  return;
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
  SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Perform depth-first search on adjacent vertices.  */
      for (e = bb->pred; e; e = e->pred_next)
        if (!TEST_BIT (data->visited_blocks,
                       e->src->index - (INVALID_BLOCK + 1)))
          flow_dfs_compute_reverse_add_bb (data, e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));

  return NULL;
}

/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}