1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains low level functions to manipulate and analyze the CFG
23    that are aware of the RTL intermediate language.
24
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
30 merge_blocks_nomove
31      - Infrastructure to quickly determine the basic block for an insn
32          compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36      - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
41 verify_flow_info
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
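/* Illustrative usage sketch (not part of GCC): a pass that wants to add code
   on the edges leaving a block typically queues the insns with
   insert_insn_on_edge and flushes them with commit_edge_insertions, letting
   the routines below split critical edges as needed.  The USE pattern built
   here is only a placeholder.  */
#if 0
static void
example_insert_on_out_edges (bb, reg)
     basic_block bb;
     rtx reg;
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_ABNORMAL))
      insert_insn_on_edge (gen_rtx_USE (VOIDmode, reg), e);

  /* The queued insns (and any new blocks) appear in the CFG only here.  */
  commit_edge_insertions ();
}
#endif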
44 \f
45 #include "config.h"
46 #include "system.h"
47 #include "tree.h"
48 #include "rtl.h"
49 #include "hard-reg-set.h"
50 #include "basic-block.h"
51 #include "regs.h"
52 #include "flags.h"
53 #include "output.h"
54 #include "function.h"
55 #include "except.h"
56 #include "toplev.h"
57 #include "tm_p.h"
58 #include "obstack.h"
59 #include "insn-config.h"
60
61 /* Stubs in case we don't have a return insn. */
62 #ifndef HAVE_return
63 #define HAVE_return 0
64 #define gen_return() NULL_RTX
65 #endif
66
67 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
68 /* ??? Should probably be using LABEL_NUSES instead. It would take a
69 bit of surgery to be able to use or co-opt the routines in jump. */
70 rtx label_value_list;
71 rtx tail_recursion_label_list;
72
73 static int can_delete_note_p PARAMS ((rtx));
74 static int can_delete_label_p PARAMS ((rtx));
75 static void commit_one_edge_insertion PARAMS ((edge, int));
76 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
77 static rtx last_loop_beg_note PARAMS ((rtx));
78 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
79 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
80 \f
81 /* Return true if NOTE is not one of the ones that must be kept paired,
82 so that we may simply delete it. */
83
84 static int
85 can_delete_note_p (note)
86 rtx note;
87 {
88 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
89 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
90 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
91 }
92
93 /* True if a given label can be deleted. */
94
95 static int
96 can_delete_label_p (label)
97 rtx label;
98 {
99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
102 && !in_expr_list_p (forced_labels, label)
103 && !in_expr_list_p (label_value_list, label));
104 }
105
106 /* Delete INSN by patching it out. Return the next insn. */
107
108 rtx
109 delete_insn (insn)
110 rtx insn;
111 {
112 rtx next = NEXT_INSN (insn);
113 rtx note;
114 bool really_delete = true;
115
116 if (GET_CODE (insn) == CODE_LABEL)
117 {
118 /* Some labels can't be directly removed from the INSN chain, as they
119        might be referenced via variables, the constant pool, etc.
120 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
121 if (! can_delete_label_p (insn))
122 {
123 const char *name = LABEL_NAME (insn);
124
125 really_delete = false;
126 PUT_CODE (insn, NOTE);
127 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
128 NOTE_SOURCE_FILE (insn) = name;
129 }
130
131 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
132 }
133
134 if (really_delete)
135 {
136 /* If this insn has already been deleted, something is very wrong. */
137 if (INSN_DELETED_P (insn))
138 abort ();
139 remove_insn (insn);
140 INSN_DELETED_P (insn) = 1;
141 }
142
143 /* If deleting a jump, decrement the use count of the label. Deleting
144 the label itself should happen in the normal course of block merging. */
145 if (GET_CODE (insn) == JUMP_INSN
146 && JUMP_LABEL (insn)
147 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
148 LABEL_NUSES (JUMP_LABEL (insn))--;
149
150 /* Also if deleting an insn that references a label. */
151 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
152 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
153 LABEL_NUSES (XEXP (note, 0))--;
154
155 if (GET_CODE (insn) == JUMP_INSN
156 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
157 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
158 {
159 rtx pat = PATTERN (insn);
160 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
161 int len = XVECLEN (pat, diff_vec_p);
162 int i;
163
164 for (i = 0; i < len; i++)
165 {
166 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
167
168 /* When deleting code in bulk (e.g. removing many unreachable
169 blocks) we can delete a label that's a target of the vector
170 before deleting the vector itself. */
171 if (GET_CODE (label) != NOTE)
172 LABEL_NUSES (label)--;
173 }
174 }
175
176 return next;
177 }
178
179 /* Like delete_insn but also purge dead edges from the block containing INSN.  */
180 rtx
181 delete_insn_and_edges (insn)
182 rtx insn;
183 {
184 rtx x;
185 bool purge = false;
186
187 if (INSN_P (insn)
188 && BLOCK_FOR_INSN (insn)
189 && BLOCK_FOR_INSN (insn)->end == insn)
190 purge = true;
191 x = delete_insn (insn);
192 if (purge)
193 purge_dead_edges (BLOCK_FOR_INSN (insn));
194 return x;
195 }
196
197 /* Unlink a chain of insns between START and FINISH, leaving notes
198 that must be paired. */
199
200 void
201 delete_insn_chain (start, finish)
202 rtx start, finish;
203 {
204 rtx next;
205
206 /* Unchain the insns one by one. It would be quicker to delete all of these
207 with a single unchaining, rather than one at a time, but we need to keep
208      the NOTEs.  */
209 while (1)
210 {
211 next = NEXT_INSN (start);
212 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
213 ;
214 else
215 next = delete_insn (start);
216
217 if (start == finish)
218 break;
219 start = next;
220 }
221 }
222
223 /* Like delete_insn_chain but also purge dead edges from the block containing LAST.  */
224 void
225 delete_insn_chain_and_edges (first, last)
226 rtx first, last;
227 {
228 bool purge = false;
229
230 if (INSN_P (last)
231 && BLOCK_FOR_INSN (last)
232 && BLOCK_FOR_INSN (last)->end == last)
233 purge = true;
234 delete_insn_chain (first, last);
235 if (purge)
236 purge_dead_edges (BLOCK_FOR_INSN (last));
237 }
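/* Illustrative sketch (not part of GCC): when the insn being removed might
   end its basic block, the *_and_edges variants above keep the successor
   edges in sync automatically.  INSN is assumed to come from the caller.  */
#if 0
  if (GET_CODE (insn) == JUMP_INSN)
    delete_insn_and_edges (insn);	/* Also purges now-dead out edges.  */
  else
    delete_insn (insn);
#endif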
238 \f
239 /* Create a new basic block consisting of the instructions between HEAD and END
240    inclusive.  This function is designed to allow fast BB construction - it
241    reuses the note and basic block struct in BB_NOTE, if any, does not grow the
242    BASIC_BLOCK chain, and should be used directly only by CFG construction code.
243    END can be NULL to create a new empty basic block before HEAD.  Both END and
244    HEAD can be NULL to create a basic block at the end of the INSN chain.
245    AFTER is the basic block the new one should be put after.  */
246
247 basic_block
248 create_basic_block_structure (head, end, bb_note, after)
249 rtx head, end, bb_note;
250 basic_block after;
251 {
252 basic_block bb;
253
254 if (bb_note
255 && ! RTX_INTEGRATED_P (bb_note)
256 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
257 && bb->aux == NULL)
258 {
259 /* If we found an existing note, thread it back onto the chain. */
260
261 rtx after;
262
263 if (GET_CODE (head) == CODE_LABEL)
264 after = head;
265 else
266 {
267 after = PREV_INSN (head);
268 head = bb_note;
269 }
270
271 if (after != bb_note && NEXT_INSN (after) != bb_note)
272 reorder_insns_nobb (bb_note, bb_note, after);
273 }
274 else
275 {
276 /* Otherwise we must create a note and a basic block structure. */
277
278 bb = alloc_block ();
279
280 if (!head && !end)
281 head = end = bb_note
282 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
283 else if (GET_CODE (head) == CODE_LABEL && end)
284 {
285 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
286 if (head == end)
287 end = bb_note;
288 }
289 else
290 {
291 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
292 head = bb_note;
293 if (!end)
294 end = head;
295 }
296
297 NOTE_BASIC_BLOCK (bb_note) = bb;
298 }
299
300 /* Always include the bb note in the block. */
301 if (NEXT_INSN (end) == bb_note)
302 end = bb_note;
303
304 bb->head = head;
305 bb->end = end;
306 bb->index = last_basic_block++;
307 bb->flags = BB_NEW;
308 link_block (bb, after);
309 BASIC_BLOCK (bb->index) = bb;
310 update_bb_for_insn (bb);
311
312 /* Tag the block so that we know it has been used when considering
313 other basic block notes. */
314 bb->aux = bb;
315
316 return bb;
317 }
318
319 /* Create a new basic block consisting of the instructions between HEAD and END
320    and place it in the BB chain after block AFTER.  END can be NULL to create a
321    new empty basic block before HEAD.  Both END and HEAD can be NULL to create a
322    basic block at the end of the INSN chain.  */
323
324 basic_block
325 create_basic_block (head, end, after)
326 rtx head, end;
327 basic_block after;
328 {
329 basic_block bb;
330
331 /* Place the new block just after the end. */
332 VARRAY_GROW (basic_block_info, last_basic_block+1);
333
334 n_basic_blocks++;
335
336 bb = create_basic_block_structure (head, end, NULL, after);
337 bb->aux = NULL;
338 return bb;
339 }
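/* Illustrative sketch (not part of GCC) of the NULL conventions described
   above; BB is assumed to be an existing block supplied by the caller.  */
#if 0
  /* An empty block placed just before the insns of BB, linked into the
     chain after BB's predecessor.  */
  basic_block empty_bb = create_basic_block (bb->head, NULL, bb->prev_bb);

  /* A block covering insns appended at the very end of the insn chain.  */
  basic_block tail_bb = create_basic_block (NULL, NULL, EXIT_BLOCK_PTR->prev_bb);
#endif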
340 \f
341 /* Delete the insns in a (non-live) block. We physically delete every
342 non-deleted-note insn, and update the flow graph appropriately.
343
344 Return nonzero if we deleted an exception handler. */
345
346 /* ??? Preserving all such notes strikes me as wrong. It would be nice
347 to post-process the stream to remove empty blocks, loops, ranges, etc. */
348
349 int
350 flow_delete_block_noexpunge (b)
351 basic_block b;
352 {
353 int deleted_handler = 0;
354 rtx insn, end, tmp;
355
356 /* If the head of this block is a CODE_LABEL, then it might be the
357 label for an exception handler which can't be reached.
358
359 We need to remove the label from the exception_handler_label list
360 and remove the associated NOTE_INSN_EH_REGION_BEG and
361 NOTE_INSN_EH_REGION_END notes. */
362
363 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
364 hanging before the block. */
365
366 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
367 {
368 if (GET_CODE (insn) != NOTE)
369 break;
370 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
371 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
372 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
373 }
374
375 insn = b->head;
376
377 never_reached_warning (insn, b->end);
378
379 if (GET_CODE (insn) == CODE_LABEL)
380 maybe_remove_eh_handler (insn);
381
382 /* Include any jump table following the basic block. */
383 end = b->end;
384 if (GET_CODE (end) == JUMP_INSN
385 && (tmp = JUMP_LABEL (end)) != NULL_RTX
386 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
387 && GET_CODE (tmp) == JUMP_INSN
388 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
389 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
390 end = tmp;
391
392 /* Include any barrier that may follow the basic block. */
393 tmp = next_nonnote_insn (end);
394 if (tmp && GET_CODE (tmp) == BARRIER)
395 end = tmp;
396
397 /* Selectively delete the entire chain. */
398 b->head = NULL;
399 delete_insn_chain (insn, end);
400
401 /* Remove the edges into and out of this block. Note that there may
402 indeed be edges in, if we are removing an unreachable loop. */
403 while (b->pred != NULL)
404 remove_edge (b->pred);
405 while (b->succ != NULL)
406 remove_edge (b->succ);
407
408 b->pred = NULL;
409 b->succ = NULL;
410
411 return deleted_handler;
412 }
413
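/* Like flow_delete_block_noexpunge, but also remove B from the basic block
   array.  */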
414 int
415 flow_delete_block (b)
416 basic_block b;
417 {
418 int deleted_handler = flow_delete_block_noexpunge (b);
419
420 /* Remove the basic block from the array. */
421 expunge_block (b);
422
423 return deleted_handler;
424 }
425 \f
426 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
427
428 void
429 compute_bb_for_insn ()
430 {
431 basic_block bb;
432
433 FOR_EACH_BB (bb)
434 {
435 rtx end = bb->end;
436 rtx insn;
437
438 for (insn = bb->head; ; insn = NEXT_INSN (insn))
439 {
440 BLOCK_FOR_INSN (insn) = bb;
441 if (insn == end)
442 break;
443 }
444 }
445 }
446
447 /* Release the basic_block_for_insn array. */
448
449 void
450 free_bb_for_insn ()
451 {
452 rtx insn;
453 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
454 if (GET_CODE (insn) != BARRIER)
455 BLOCK_FOR_INSN (insn) = NULL;
456 }
457
458 /* Update BLOCK_FOR_INSN for every insn within BB.  */
459
460 void
461 update_bb_for_insn (bb)
462 basic_block bb;
463 {
464 rtx insn;
465
466 for (insn = bb->head; ; insn = NEXT_INSN (insn))
467 {
468 set_block_for_insn (insn, bb);
469 if (insn == bb->end)
470 break;
471 }
472 }
473 \f
474 /* Split a block BB after insn INSN creating a new fallthru edge.
475 Return the new edge. Note that to keep other parts of the compiler happy,
476 this function renumbers all the basic blocks so that the new
477 one has a number one greater than the block split. */
478
479 edge
480 split_block (bb, insn)
481 basic_block bb;
482 rtx insn;
483 {
484 basic_block new_bb;
485 edge new_edge;
486 edge e;
487
488 /* There is no point splitting the block after its end. */
489 if (bb->end == insn)
490 return 0;
491
492 /* Create the new basic block. */
493 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
494 new_bb->count = bb->count;
495 new_bb->frequency = bb->frequency;
496 new_bb->loop_depth = bb->loop_depth;
497 bb->end = insn;
498
499 /* Redirect the outgoing edges. */
500 new_bb->succ = bb->succ;
501 bb->succ = NULL;
502 for (e = new_bb->succ; e; e = e->succ_next)
503 e->src = new_bb;
504
505 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
506
507 if (bb->global_live_at_start)
508 {
509 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
510 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
511 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
512
513 /* We now have to calculate which registers are live at the end
514 of the split basic block and at the start of the new basic
515 block. Start with those registers that are known to be live
516 at the end of the original basic block and get
517 propagate_block to determine which registers are live. */
518 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
519 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
520 COPY_REG_SET (bb->global_live_at_end,
521 new_bb->global_live_at_start);
522 #ifdef HAVE_conditional_execution
523 /* In the presence of conditional execution we are not able to update
524 liveness precisely. */
525 if (reload_completed)
526 {
527 bb->flags |= BB_DIRTY;
528 new_bb->flags |= BB_DIRTY;
529 }
530 #endif
531 }
532
533 return new_edge;
534 }
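/* Illustrative sketch (not part of GCC): split BB right after INSN; the
   second half of the block starts at the new block's head.  BB and INSN are
   assumed to come from the caller.  */
#if 0
  edge fallthru = split_block (bb, insn);

  if (fallthru)
    {
      basic_block second_half = fallthru->dest;
      /* BB now ends at INSN; SECOND_HALF holds the remaining insns and has
	 inherited BB's old successor edges.  */
    }
#endif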
535
536 /* Blocks A and B are to be merged into a single block A. The insns
537 are already contiguous, hence `nomove'. */
538
539 void
540 merge_blocks_nomove (a, b)
541 basic_block a, b;
542 {
543 rtx b_head = b->head, b_end = b->end, a_end = a->end;
544 rtx del_first = NULL_RTX, del_last = NULL_RTX;
545 int b_empty = 0;
546 edge e;
547
548 /* If there was a CODE_LABEL beginning B, delete it. */
549 if (GET_CODE (b_head) == CODE_LABEL)
550 {
551 /* Detect basic blocks with nothing but a label. This can happen
552 in particular at the end of a function. */
553 if (b_head == b_end)
554 b_empty = 1;
555
556 del_first = del_last = b_head;
557 b_head = NEXT_INSN (b_head);
558 }
559
560 /* Delete the basic block note and handle blocks containing just that
561 note. */
562 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
563 {
564 if (b_head == b_end)
565 b_empty = 1;
566 if (! del_last)
567 del_first = b_head;
568
569 del_last = b_head;
570 b_head = NEXT_INSN (b_head);
571 }
572
573 /* If there was a jump out of A, delete it. */
574 if (GET_CODE (a_end) == JUMP_INSN)
575 {
576 rtx prev;
577
578 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
579 if (GET_CODE (prev) != NOTE
580 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
581 || prev == a->head)
582 break;
583
584 del_first = a_end;
585
586 #ifdef HAVE_cc0
587 /* If this was a conditional jump, we need to also delete
588 the insn that set cc0. */
589 if (only_sets_cc0_p (prev))
590 {
591 rtx tmp = prev;
592
593 prev = prev_nonnote_insn (prev);
594 if (!prev)
595 prev = a->head;
596 del_first = tmp;
597 }
598 #endif
599
600 a_end = PREV_INSN (del_first);
601 }
602 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
603 del_first = NEXT_INSN (a_end);
604
605 /* Normally there should only be one successor of A and that is B, but
606      partway through the merge of blocks for conditional_execution we'll
607 be merging a TEST block with THEN and ELSE successors. Free the
608 whole lot of them and hope the caller knows what they're doing. */
609 while (a->succ)
610 remove_edge (a->succ);
611
612 /* Adjust the edges out of B for the new owner. */
613 for (e = b->succ; e; e = e->succ_next)
614 e->src = a;
615 a->succ = b->succ;
616 a->flags |= b->flags;
617
618 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
619 b->pred = b->succ = NULL;
620 a->global_live_at_end = b->global_live_at_end;
621
622 expunge_block (b);
623
624 /* Delete everything marked above as well as crap that might be
625 hanging out between the two blocks. */
626 delete_insn_chain (del_first, del_last);
627
628 /* Reassociate the insns of B with A. */
629 if (!b_empty)
630 {
631 rtx x;
632
633 for (x = a_end; x != b_end; x = NEXT_INSN (x))
634 set_block_for_insn (x, a);
635
636 set_block_for_insn (b_end, a);
637
638 a_end = b_end;
639 }
640
641 a->end = a_end;
642 }
643 \f
644 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
645 exist. */
646
647 rtx
648 block_label (block)
649 basic_block block;
650 {
651 if (block == EXIT_BLOCK_PTR)
652 return NULL_RTX;
653
654 if (GET_CODE (block->head) != CODE_LABEL)
655 {
656 block->head = emit_label_before (gen_label_rtx (), block->head);
657 }
658
659 return block->head;
660 }
661
662 /* Attempt to perform edge redirection by replacing a possibly complex jump
663    instruction with an unconditional jump, or by removing the jump completely.
664    This applies only if all edges now point to the same block.  The parameters
665    and return values are equivalent to redirect_edge_and_branch.  */
666
667 static bool
668 try_redirect_by_replacing_jump (e, target)
669 edge e;
670 basic_block target;
671 {
672 basic_block src = e->src;
673 rtx insn = src->end, kill_from;
674 edge tmp;
675 rtx set, table;
676 int fallthru = 0;
677
678 /* Verify that all targets will be TARGET. */
679 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
680 if (tmp->dest != target && tmp != e)
681 break;
682
683 if (tmp || !onlyjump_p (insn))
684 return false;
685 if (reload_completed && JUMP_LABEL (insn)
686 && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
687 && GET_CODE (table) == JUMP_INSN
688 && (GET_CODE (PATTERN (table)) == ADDR_VEC
689 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
690 return false;
691
692 /* Avoid removing branch with side effects. */
693 set = single_set (insn);
694 if (!set || side_effects_p (set))
695 return false;
696
697 /* In case we zap a conditional jump, we'll need to kill
698 the cc0 setter too. */
699 kill_from = insn;
700 #ifdef HAVE_cc0
701 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
702 kill_from = PREV_INSN (insn);
703 #endif
704
705 /* See if we can create the fallthru edge. */
706 if (can_fallthru (src, target))
707 {
708 if (rtl_dump_file)
709 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
710 fallthru = 1;
711
712 /* Selectively unlink whole insn chain. */
713 delete_insn_chain (kill_from, PREV_INSN (target->head));
714 }
715
716   /* If this is already a simple jump, redirect it.  */
717 else if (simplejump_p (insn))
718 {
719 if (e->dest == target)
720 return false;
721 if (rtl_dump_file)
722 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
723 INSN_UID (insn), e->dest->index, target->index);
724 if (!redirect_jump (insn, block_label (target), 0))
725 {
726 if (target == EXIT_BLOCK_PTR)
727 return false;
728 abort ();
729 }
730 }
731
732 /* Cannot do anything for target exit block. */
733 else if (target == EXIT_BLOCK_PTR)
734 return false;
735
736   /* Otherwise, replace the possibly complicated jump insn by a simple jump insn.  */
737 else
738 {
739 rtx target_label = block_label (target);
740 rtx barrier, tmp;
741
742 emit_jump_insn_after (gen_jump (target_label), insn);
743 JUMP_LABEL (src->end) = target_label;
744 LABEL_NUSES (target_label)++;
745 if (rtl_dump_file)
746 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
747 INSN_UID (insn), INSN_UID (src->end));
748
749
750 delete_insn_chain (kill_from, insn);
751
752 /* Recognize a tablejump that we are converting to a
753 simple jump and remove its associated CODE_LABEL
754 and ADDR_VEC or ADDR_DIFF_VEC. */
755 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
756 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
757 && GET_CODE (tmp) == JUMP_INSN
758 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
759 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
760 {
761 delete_insn_chain (JUMP_LABEL (insn), tmp);
762 }
763
764 barrier = next_nonnote_insn (src->end);
765 if (!barrier || GET_CODE (barrier) != BARRIER)
766 emit_barrier_after (src->end);
767 }
768
769 /* Keep only one edge out and set proper flags. */
770 while (src->succ->succ_next)
771 remove_edge (src->succ);
772 e = src->succ;
773 if (fallthru)
774 e->flags = EDGE_FALLTHRU;
775 else
776 e->flags = 0;
777
778 e->probability = REG_BR_PROB_BASE;
779 e->count = src->count;
780
781 /* We don't want a block to end on a line-number note since that has
782 the potential of changing the code between -g and not -g. */
783 while (GET_CODE (e->src->end) == NOTE
784 && NOTE_LINE_NUMBER (e->src->end) >= 0)
785 delete_insn (e->src->end);
786
787 if (e->dest != target)
788 redirect_edge_succ (e, target);
789
790 return true;
791 }
792
793 /* Return the last LOOP_BEG note appearing after INSN, before the start of the
794    next basic block.  Return INSN if there are no such notes.
795 
796    When emitting a jump to redirect a fallthru edge, it should always appear
797    after the LOOP_BEG notes, as the loop optimizer expects a loop either to
798    start with a fallthru edge or with a jump, following the LOOP_BEG note,
799    to the loop exit test.  */
800
801 static rtx
802 last_loop_beg_note (insn)
803 rtx insn;
804 {
805 rtx last = insn;
806
807 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
808 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
809 insn = NEXT_INSN (insn))
810 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
811 last = insn;
812
813 return last;
814 }
815
816 /* Attempt to change code to redirect edge E to TARGET.  Don't do so at the
817    expense of adding new instructions or reordering basic blocks.
818 
819    The function can also be called with the edge destination equal to TARGET.
820    Then it should try the simplifications and do nothing if none is possible.
821 
822    Return true if the transformation succeeded.  We still return false in case
823    E already pointed to TARGET and we didn't manage to simplify the instruction
824    stream.  */
825
826 bool
827 redirect_edge_and_branch (e, target)
828 edge e;
829 basic_block target;
830 {
831 rtx tmp;
832 rtx old_label = e->dest->head;
833 basic_block src = e->src;
834 rtx insn = src->end;
835
836 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
837 return false;
838
839 if (try_redirect_by_replacing_jump (e, target))
840 return true;
841
842   /* Do this fast path late, as we want the code above to perform
843      simplifications even when called on the single edge leaving a basic
844      block that contains a nontrivial jump insn.  */
845 else if (e->dest == target)
846 return false;
847
848 /* We can only redirect non-fallthru edges of jump insn. */
849 if (e->flags & EDGE_FALLTHRU)
850 return false;
851 else if (GET_CODE (insn) != JUMP_INSN)
852 return false;
853
854 /* Recognize a tablejump and adjust all matching cases. */
855 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
856 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
857 && GET_CODE (tmp) == JUMP_INSN
858 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
859 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
860 {
861 rtvec vec;
862 int j;
863 rtx new_label = block_label (target);
864
865 if (target == EXIT_BLOCK_PTR)
866 return false;
867 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
868 vec = XVEC (PATTERN (tmp), 0);
869 else
870 vec = XVEC (PATTERN (tmp), 1);
871
872 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
873 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
874 {
875 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
876 --LABEL_NUSES (old_label);
877 ++LABEL_NUSES (new_label);
878 }
879
880       /* Handle casesi dispatch insns.  */
881 if ((tmp = single_set (insn)) != NULL
882 && SET_DEST (tmp) == pc_rtx
883 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
884 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
885 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
886 {
887 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
888 new_label);
889 --LABEL_NUSES (old_label);
890 ++LABEL_NUSES (new_label);
891 }
892 }
893 else
894 {
895       /* ??? We might be able to play games with moving the named labels from
896          one basic block to the other in case only one computed jump is
897          available.  */
898 if (computed_jump_p (insn)
899 /* A return instruction can't be redirected. */
900 || returnjump_p (insn))
901 return false;
902
903 /* If the insn doesn't go where we think, we're confused. */
904 if (JUMP_LABEL (insn) != old_label)
905 abort ();
906
907 /* If the substitution doesn't succeed, die. This can happen
908 if the back end emitted unrecognizable instructions or if
909 target is exit block on some arches. */
910 if (!redirect_jump (insn, block_label (target), 0))
911 {
912 if (target == EXIT_BLOCK_PTR)
913 return false;
914 abort ();
915 }
916 }
917
918 if (rtl_dump_file)
919 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
920 e->src->index, e->dest->index, target->index);
921
922 if (e->dest != target)
923 redirect_edge_succ_nodup (e, target);
924
925 return true;
926 }
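/* Illustrative sketch (not part of GCC): callers that must not fail
   typically fall back to redirect_edge_and_branch_force below, which is
   free to create a new jump block.  E and TARGET are assumed given.  */
#if 0
  if (!redirect_edge_and_branch (e, target))
    {
      basic_block jump_bb = redirect_edge_and_branch_force (e, target);
      /* JUMP_BB is the block created to hold the new jump, or NULL if the
	 redirection could be done in place.  */
    }
#endif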
927
928 /* Like force_nonfallthru below, but additionally performs redirection.
929    Used by redirect_edge_and_branch_force.  */
930
931 static basic_block
932 force_nonfallthru_and_redirect (e, target)
933 edge e;
934 basic_block target;
935 {
936 basic_block jump_block, new_bb = NULL, src = e->src;
937 rtx note;
938 edge new_edge;
939 int abnormal_edge_flags = 0;
940
941 if (e->flags & EDGE_ABNORMAL)
942 {
943       /* Irritating special case - a fallthru edge to the same block as an
944          abnormal edge.
945          We can't redirect the abnormal edge, but we can still split the fallthru
946          one and create a separate abnormal edge to the original destination.
947          This allows bb-reorder to make such an edge non-fallthru.  */
948 if (e->dest != target)
949 abort ();
950 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
951 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
952 }
953 else if (!(e->flags & EDGE_FALLTHRU))
954 abort ();
955 else if (e->src == ENTRY_BLOCK_PTR)
956 {
957 /* We can't redirect the entry block. Create an empty block at the
958 start of the function which we use to add the new jump. */
959 edge *pe1;
960 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
961
962 /* Change the existing edge's source to be the new block, and add
963 a new edge from the entry block to the new block. */
964 e->src = bb;
965 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
966 if (*pe1 == e)
967 {
968 *pe1 = e->succ_next;
969 break;
970 }
971 e->succ_next = 0;
972 bb->succ = e;
973 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
974 }
975
976 if (e->src->succ->succ_next || abnormal_edge_flags)
977 {
978 /* Create the new structures. */
979
980 /* Position the new block correctly relative to loop notes. */
981 note = last_loop_beg_note (e->src->end);
982 note = NEXT_INSN (note);
983
984 /* ... and ADDR_VECs. */
985 if (note != NULL
986 && GET_CODE (note) == CODE_LABEL
987 && NEXT_INSN (note)
988 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
989 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
990 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
991 note = NEXT_INSN (NEXT_INSN (note));
992
993 jump_block = create_basic_block (note, NULL, e->src);
994 jump_block->count = e->count;
995 jump_block->frequency = EDGE_FREQUENCY (e);
996 jump_block->loop_depth = target->loop_depth;
997
998 if (target->global_live_at_start)
999 {
1000 jump_block->global_live_at_start
1001 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1002 jump_block->global_live_at_end
1003 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1004 COPY_REG_SET (jump_block->global_live_at_start,
1005 target->global_live_at_start);
1006 COPY_REG_SET (jump_block->global_live_at_end,
1007 target->global_live_at_start);
1008 }
1009
1010 /* Wire edge in. */
1011 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1012 new_edge->probability = e->probability;
1013 new_edge->count = e->count;
1014
1015 /* Redirect old edge. */
1016 redirect_edge_pred (e, jump_block);
1017 e->probability = REG_BR_PROB_BASE;
1018
1019 new_bb = jump_block;
1020 }
1021 else
1022 jump_block = e->src;
1023
1024 e->flags &= ~EDGE_FALLTHRU;
1025 if (target == EXIT_BLOCK_PTR)
1026 {
1027 if (HAVE_return)
1028 emit_jump_insn_after (gen_return (), jump_block->end);
1029 else
1030 abort ();
1031 }
1032 else
1033 {
1034 rtx label = block_label (target);
1035 emit_jump_insn_after (gen_jump (label), jump_block->end);
1036 JUMP_LABEL (jump_block->end) = label;
1037 LABEL_NUSES (label)++;
1038 }
1039
1040 emit_barrier_after (jump_block->end);
1041 redirect_edge_succ_nodup (e, target);
1042
1043 if (abnormal_edge_flags)
1044 make_edge (src, target, abnormal_edge_flags);
1045
1046 return new_bb;
1047 }
1048
1049 /* Edge E is assumed to be a fallthru edge.  Emit the needed jump instruction
1050    (and possibly create a new basic block) to make the edge non-fallthru.
1051    Return the newly created BB, or NULL if none.  */
1052
1053 basic_block
1054 force_nonfallthru (e)
1055 edge e;
1056 {
1057 return force_nonfallthru_and_redirect (e, e->dest);
1058 }
1059
1060 /* Redirect edge E to TARGET even at the expense of creating a new jump insn
1061    or basic block.  Return the new basic block if one was created, NULL
1062    otherwise.  Abort if the conversion is impossible.  */
1063
1064 basic_block
1065 redirect_edge_and_branch_force (e, target)
1066 edge e;
1067 basic_block target;
1068 {
1069 if (redirect_edge_and_branch (e, target)
1070 || e->dest == target)
1071 return NULL;
1072
1073 /* In case the edge redirection failed, try to force it to be non-fallthru
1074 and redirect newly created simplejump. */
1075 return force_nonfallthru_and_redirect (e, target);
1076 }
1077
1078 /* The given edge should potentially be a fallthru edge. If that is in
1079 fact true, delete the jump and barriers that are in the way. */
1080
1081 void
1082 tidy_fallthru_edge (e, b, c)
1083 edge e;
1084 basic_block b, c;
1085 {
1086 rtx q;
1087
1088 /* ??? In a late-running flow pass, other folks may have deleted basic
1089 blocks by nopping out blocks, leaving multiple BARRIERs between here
1090      and the target label.  They ought to be chastised and fixed.
1091
1092 We can also wind up with a sequence of undeletable labels between
1093 one block and the next.
1094
1095 So search through a sequence of barriers, labels, and notes for
1096 the head of block C and assert that we really do fall through. */
1097
1098 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1099 if (INSN_P (q))
1100 return;
1101
1102 /* Remove what will soon cease being the jump insn from the source block.
1103 If block B consisted only of this single jump, turn it into a deleted
1104 note. */
1105 q = b->end;
1106 if (GET_CODE (q) == JUMP_INSN
1107 && onlyjump_p (q)
1108 && (any_uncondjump_p (q)
1109 || (b->succ == e && e->succ_next == NULL)))
1110 {
1111 #ifdef HAVE_cc0
1112 /* If this was a conditional jump, we need to also delete
1113 the insn that set cc0. */
1114 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1115 q = PREV_INSN (q);
1116 #endif
1117
1118 q = PREV_INSN (q);
1119
1120 /* We don't want a block to end on a line-number note since that has
1121 the potential of changing the code between -g and not -g. */
1122 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1123 q = PREV_INSN (q);
1124 }
1125
1126 /* Selectively unlink the sequence. */
1127 if (q != PREV_INSN (c->head))
1128 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
1129
1130 e->flags |= EDGE_FALLTHRU;
1131 }
1132
1133 /* Fix up edges that now fall through, or rather should now fall through
1134 but previously required a jump around now deleted blocks. Simplify
1135 the search by only examining blocks numerically adjacent, since this
1136 is how find_basic_blocks created them. */
1137
1138 void
1139 tidy_fallthru_edges ()
1140 {
1141 basic_block b, c;
1142
1143 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1144 return;
1145
1146 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1147 {
1148 edge s;
1149
1150 c = b->next_bb;
1151
1152 /* We care about simple conditional or unconditional jumps with
1153 a single successor.
1154
1155 If we had a conditional branch to the next instruction when
1156 find_basic_blocks was called, then there will only be one
1157 out edge for the block which ended with the conditional
1158 branch (since we do not create duplicate edges).
1159
1160 Furthermore, the edge will be marked as a fallthru because we
1161 merge the flags for the duplicate edges. So we do not want to
1162 check that the edge is not a FALLTHRU edge. */
1163
1164 if ((s = b->succ) != NULL
1165 && ! (s->flags & EDGE_COMPLEX)
1166 && s->succ_next == NULL
1167 && s->dest == c
1168 /* If the jump insn has side effects, we can't tidy the edge. */
1169 && (GET_CODE (b->end) != JUMP_INSN
1170 || onlyjump_p (b->end)))
1171 tidy_fallthru_edge (s, b, c);
1172 }
1173 }
1174 \f
1175 /* Helper function for split_edge.  Return true if the edge from BB2 to BB1
1176    is a back edge of a syntactic loop.  */
1177
1178 static bool
1179 back_edge_of_syntactic_loop_p (bb1, bb2)
1180 basic_block bb1, bb2;
1181 {
1182 rtx insn;
1183 int count = 0;
1184 basic_block bb;
1185
1186 if (bb1 == bb2)
1187 return true;
1188
1189 /* ??? Could we guarantee that bb indices are monotone, so that we could
1190 just compare them? */
1191 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1192 continue;
1193
1194 if (!bb)
1195 return false;
1196
1197 for (insn = bb1->end; insn != bb2->head && count >= 0;
1198 insn = NEXT_INSN (insn))
1199 if (GET_CODE (insn) == NOTE)
1200 {
1201 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1202 count++;
1203 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1204 count--;
1205 }
1206
1207 return count >= 0;
1208 }
1209
1210 /* Split a (typically critical) edge. Return the new block.
1211 Abort on abnormal edges.
1212
1213 ??? The code generally expects to be called on critical edges.
1214 The case of a block ending in an unconditional jump to a
1215 block with multiple predecessors is not handled optimally. */
1216
1217 basic_block
1218 split_edge (edge_in)
1219 edge edge_in;
1220 {
1221 basic_block bb;
1222 edge edge_out;
1223 rtx before;
1224
1225 /* Abnormal edges cannot be split. */
1226 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1227 abort ();
1228
1229   /* We are going to place the new block in front of the edge destination.
1230      Make sure the destination has no fallthru predecessors first.  */
1231 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1232 {
1233 edge e;
1234
1235 for (e = edge_in->dest->pred; e; e = e->pred_next)
1236 if (e->flags & EDGE_FALLTHRU)
1237 break;
1238
1239 if (e)
1240 force_nonfallthru (e);
1241 }
1242
1243 /* Create the basic block note.
1244
1245 Where we place the note can have a noticeable impact on the generated
1246 code. Consider this cfg:
1247
1248 E
1249 |
1250 0
1251 / \
1252 +->1-->2--->E
1253 | |
1254 +--+
1255
1256 If we need to insert an insn on the edge from block 0 to block 1,
1257 we want to ensure the instructions we insert are outside of any
1258 loop notes that physically sit between block 0 and block 1. Otherwise
1259 we confuse the loop optimizer into thinking the loop is a phony. */
1260
1261 if (edge_in->dest != EXIT_BLOCK_PTR
1262 && PREV_INSN (edge_in->dest->head)
1263 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1264 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1265 == NOTE_INSN_LOOP_BEG)
1266 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1267 before = PREV_INSN (edge_in->dest->head);
1268 else if (edge_in->dest != EXIT_BLOCK_PTR)
1269 before = edge_in->dest->head;
1270 else
1271 before = NULL_RTX;
1272
1273 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1274 bb->count = edge_in->count;
1275 bb->frequency = EDGE_FREQUENCY (edge_in);
1276
1277 /* ??? This info is likely going to be out of date very soon. */
1278 if (edge_in->dest->global_live_at_start)
1279 {
1280 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1281 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1282 COPY_REG_SET (bb->global_live_at_start,
1283 edge_in->dest->global_live_at_start);
1284 COPY_REG_SET (bb->global_live_at_end,
1285 edge_in->dest->global_live_at_start);
1286 }
1287
1288 edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1289
1290   /* For non-fallthru edges, we must adjust the predecessor's
1291 jump instruction to target our new block. */
1292 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1293 {
1294 if (!redirect_edge_and_branch (edge_in, bb))
1295 abort ();
1296 }
1297 else
1298 redirect_edge_succ (edge_in, bb);
1299
1300 return bb;
1301 }
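/* Illustrative sketch (not part of GCC): splitting a critical edge before
   emitting code that must execute only when E is taken.  E is assumed to
   come from the caller.  */
#if 0
  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
    {
      basic_block landing = split_edge (e);
      /* Insns emitted at the end of LANDING execute exactly when the
	 original edge was taken.  */
    }
#endif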
1302
1303 /* Queue instructions for insertion on an edge between two basic blocks.
1304 The new instructions and basic blocks (if any) will not appear in the
1305 CFG until commit_edge_insertions is called. */
1306
1307 void
1308 insert_insn_on_edge (pattern, e)
1309 rtx pattern;
1310 edge e;
1311 {
1312 /* We cannot insert instructions on an abnormal critical edge.
1313 It will be easier to find the culprit if we die now. */
1314 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1315 abort ();
1316
1317 if (e->insns == NULL_RTX)
1318 start_sequence ();
1319 else
1320 push_to_sequence (e->insns);
1321
1322 emit_insn (pattern);
1323
1324 e->insns = get_insns ();
1325 end_sequence ();
1326 }
1327
1328 /* Update the CFG for the instructions queued on edge E. */
1329
1330 static void
1331 commit_one_edge_insertion (e, watch_calls)
1332 edge e;
1333 int watch_calls;
1334 {
1335 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1336 basic_block bb = NULL;
1337
1338 /* Pull the insns off the edge now since the edge might go away. */
1339 insns = e->insns;
1340 e->insns = NULL_RTX;
1341
1342   /* Special case -- avoid inserting code between a call and the insn
1343      that stores its return value.  */
1344 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1345 && e->src != ENTRY_BLOCK_PTR
1346 && GET_CODE (e->src->end) == CALL_INSN)
1347 {
1348 rtx next = next_nonnote_insn (e->src->end);
1349
1350 after = e->dest->head;
1351       /* The first insn after the call may be a stack pop; skip it.  */
1352 while (next
1353 && keep_with_call_p (next))
1354 {
1355 after = next;
1356 next = next_nonnote_insn (next);
1357 }
1358 bb = e->dest;
1359 }
1360 if (!before && !after)
1361 {
1362 /* Figure out where to put these things. If the destination has
1363 one predecessor, insert there. Except for the exit block. */
1364 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1365 {
1366 bb = e->dest;
1367
1368 /* Get the location correct wrt a code label, and "nice" wrt
1369 a basic block note, and before everything else. */
1370 tmp = bb->head;
1371 if (GET_CODE (tmp) == CODE_LABEL)
1372 tmp = NEXT_INSN (tmp);
1373 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1374 tmp = NEXT_INSN (tmp);
1375 if (tmp == bb->head)
1376 before = tmp;
1377 else if (tmp)
1378 after = PREV_INSN (tmp);
1379 else
1380 after = get_last_insn ();
1381 }
1382
1383 /* If the source has one successor and the edge is not abnormal,
1384 insert there. Except for the entry block. */
1385 else if ((e->flags & EDGE_ABNORMAL) == 0
1386 && e->src->succ->succ_next == NULL
1387 && e->src != ENTRY_BLOCK_PTR)
1388 {
1389 bb = e->src;
1390
1391 /* It is possible to have a non-simple jump here. Consider a target
1392 where some forms of unconditional jumps clobber a register. This
1393 happens on the fr30 for example.
1394
1395 We know this block has a single successor, so we can just emit
1396 the queued insns before the jump. */
1397 if (GET_CODE (bb->end) == JUMP_INSN)
1398 for (before = bb->end;
1399 GET_CODE (PREV_INSN (before)) == NOTE
1400 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1401 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1402 ;
1403 else
1404 {
1405 /* We'd better be fallthru, or we've lost track of what's what. */
1406 if ((e->flags & EDGE_FALLTHRU) == 0)
1407 abort ();
1408
1409 after = bb->end;
1410 }
1411 }
1412 /* Otherwise we must split the edge. */
1413 else
1414 {
1415 bb = split_edge (e);
1416 after = bb->end;
1417 }
1418 }
1419
1420 /* Now that we've found the spot, do the insertion. */
1421
1422 if (before)
1423 {
1424 emit_insn_before (insns, before);
1425 last = prev_nonnote_insn (before);
1426 }
1427 else
1428 last = emit_insn_after (insns, after);
1429
1430 if (returnjump_p (last))
1431 {
1432 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1433 This is not currently a problem because this only happens
1434 for the (single) epilogue, which already has a fallthru edge
1435 to EXIT. */
1436
1437 e = bb->succ;
1438 if (e->dest != EXIT_BLOCK_PTR
1439 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1440 abort ();
1441
1442 e->flags &= ~EDGE_FALLTHRU;
1443 emit_barrier_after (last);
1444
1445 if (before)
1446 delete_insn (before);
1447 }
1448 else if (GET_CODE (last) == JUMP_INSN)
1449 abort ();
1450
1451 find_sub_basic_blocks (bb);
1452 }
1453
1454 /* Update the CFG for all queued instructions. */
1455
1456 void
1457 commit_edge_insertions ()
1458 {
1459 basic_block bb;
1460
1461 #ifdef ENABLE_CHECKING
1462 verify_flow_info ();
1463 #endif
1464
1465 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1466 {
1467 edge e, next;
1468
1469 for (e = bb->succ; e; e = next)
1470 {
1471 next = e->succ_next;
1472 if (e->insns)
1473 commit_one_edge_insertion (e, false);
1474 }
1475 }
1476 }
1477 \f
1478 /* Update the CFG for all queued instructions, taking special care not to
1479    insert code between a call and the insn that stores its return value.  */
1480
1481 void
1482 commit_edge_insertions_watch_calls ()
1483 {
1484 basic_block bb;
1485
1486 #ifdef ENABLE_CHECKING
1487 verify_flow_info ();
1488 #endif
1489
1490 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1491 {
1492 edge e, next;
1493
1494 for (e = bb->succ; e; e = next)
1495 {
1496 next = e->succ_next;
1497 if (e->insns)
1498 commit_one_edge_insertion (e, true);
1499 }
1500 }
1501 }
1502 \f
1503 /* Print out one basic block with live information at start and end. */
1504
1505 void
1506 dump_bb (bb, outf)
1507 basic_block bb;
1508 FILE *outf;
1509 {
1510 rtx insn;
1511 rtx last;
1512 edge e;
1513
1514 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1515 bb->index, bb->loop_depth);
1516 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1517 putc ('\n', outf);
1518
1519 fputs (";; Predecessors: ", outf);
1520 for (e = bb->pred; e; e = e->pred_next)
1521 dump_edge_info (outf, e, 0);
1522 putc ('\n', outf);
1523
1524 fputs (";; Registers live at start:", outf);
1525 dump_regset (bb->global_live_at_start, outf);
1526 putc ('\n', outf);
1527
1528 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1529 insn = NEXT_INSN (insn))
1530 print_rtl_single (outf, insn);
1531
1532 fputs (";; Registers live at end:", outf);
1533 dump_regset (bb->global_live_at_end, outf);
1534 putc ('\n', outf);
1535
1536 fputs (";; Successors: ", outf);
1537 for (e = bb->succ; e; e = e->succ_next)
1538 dump_edge_info (outf, e, 1);
1539 putc ('\n', outf);
1540 }
1541
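/* Dump basic block BB to stderr; convenient to call from a debugger.  */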
1542 void
1543 debug_bb (bb)
1544 basic_block bb;
1545 {
1546 dump_bb (bb, stderr);
1547 }
1548
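/* Dump basic block number N to stderr.  */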
1549 void
1550 debug_bb_n (n)
1551 int n;
1552 {
1553 dump_bb (BASIC_BLOCK (n), stderr);
1554 }
1555 \f
1556 /* Like print_rtl, but also print out live information for the start of each
1557 basic block. */
1558
1559 void
1560 print_rtl_with_bb (outf, rtx_first)
1561 FILE *outf;
1562 rtx rtx_first;
1563 {
1564 rtx tmp_rtx;
1565
1566 if (rtx_first == 0)
1567 fprintf (outf, "(nil)\n");
1568 else
1569 {
1570 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1571 int max_uid = get_max_uid ();
1572 basic_block *start
1573 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1574 basic_block *end
1575 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1576 enum bb_state *in_bb_p
1577 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
1578
1579 basic_block bb;
1580
1581 FOR_EACH_BB_REVERSE (bb)
1582 {
1583 rtx x;
1584
1585 start[INSN_UID (bb->head)] = bb;
1586 end[INSN_UID (bb->end)] = bb;
1587 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1588 {
1589 enum bb_state state = IN_MULTIPLE_BB;
1590
1591 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1592 state = IN_ONE_BB;
1593 in_bb_p[INSN_UID (x)] = state;
1594
1595 if (x == bb->end)
1596 break;
1597 }
1598 }
1599
1600 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1601 {
1602 int did_output;
1603
1604 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1605 {
1606 fprintf (outf, ";; Start of basic block %d, registers live:",
1607 bb->index);
1608 dump_regset (bb->global_live_at_start, outf);
1609 putc ('\n', outf);
1610 }
1611
1612 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1613 && GET_CODE (tmp_rtx) != NOTE
1614 && GET_CODE (tmp_rtx) != BARRIER)
1615 fprintf (outf, ";; Insn is not within a basic block\n");
1616 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1617 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1618
1619 did_output = print_rtl_single (outf, tmp_rtx);
1620
1621 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1622 {
1623 fprintf (outf, ";; End of basic block %d, registers live:\n",
1624 bb->index);
1625 dump_regset (bb->global_live_at_end, outf);
1626 putc ('\n', outf);
1627 }
1628
1629 if (did_output)
1630 putc ('\n', outf);
1631 }
1632
1633 free (start);
1634 free (end);
1635 free (in_bb_p);
1636 }
1637
1638 if (current_function_epilogue_delay_list != 0)
1639 {
1640 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1641 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1642 tmp_rtx = XEXP (tmp_rtx, 1))
1643 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1644 }
1645 }
1646 \f
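/* Update the REG_BR_PROB note attached to the jump insn that ends BB so that
   it matches the probability recorded on BB's branch edge.  */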
1647 void
1648 update_br_prob_note (bb)
1649 basic_block bb;
1650 {
1651 rtx note;
1652 if (GET_CODE (bb->end) != JUMP_INSN)
1653 return;
1654 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1655 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1656 return;
1657 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1658 }
1659 \f
1660 /* Verify CFG consistency.  This function checks some CFG invariants and
1661    aborts when something is wrong.  The hope is that this function will help
1662    convert many optimization passes to keep the CFG consistent.
1663 
1664    Currently it does the following checks:
1665
1666 - test head/end pointers
1667 - overlapping of basic blocks
1668 - edge list correctness
1669 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1670 - tails of basic blocks (ensure that boundary is necessary)
1671 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1672 and NOTE_INSN_BASIC_BLOCK
1673 - check that all insns are in the basic blocks
1674 (except the switch handling code, barriers and notes)
1675 - check that all returns are followed by barriers
1676
1677    In the future it can be extended to check a lot of other things as well
1678    (reachability of basic blocks, life information, etc.).  */
1679
1680 void
1681 verify_flow_info ()
1682 {
1683 const int max_uid = get_max_uid ();
1684 const rtx rtx_first = get_insns ();
1685 rtx last_head = get_last_insn ();
1686 basic_block *bb_info, *last_visited;
1687 size_t *edge_checksum;
1688 rtx x;
1689 int num_bb_notes, err = 0;
1690 basic_block bb, last_bb_seen;
1691
1692 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1693 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1694 sizeof (basic_block));
1695 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1696
1697 /* Check bb chain & numbers. */
1698 last_bb_seen = ENTRY_BLOCK_PTR;
1699 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1700 {
1701 if (bb != EXIT_BLOCK_PTR
1702 && bb != BASIC_BLOCK (bb->index))
1703 {
1704 error ("bb %d on wrong place", bb->index);
1705 err = 1;
1706 }
1707
1708 if (bb->prev_bb != last_bb_seen)
1709 {
1710 error ("prev_bb of %d should be %d, not %d",
1711 bb->index, last_bb_seen->index, bb->prev_bb->index);
1712 err = 1;
1713 }
1714
1715 last_bb_seen = bb;
1716 }
1717
1718 FOR_EACH_BB_REVERSE (bb)
1719 {
1720 rtx head = bb->head;
1721 rtx end = bb->end;
1722
1723 /* Verify the end of the basic block is in the INSN chain. */
1724 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1725 if (x == end)
1726 break;
1727
1728 if (!x)
1729 {
1730 error ("end insn %d for block %d not found in the insn stream",
1731 INSN_UID (end), bb->index);
1732 err = 1;
1733 }
1734
1735 /* Work backwards from the end to the head of the basic block
1736 to verify the head is in the RTL chain. */
1737 for (; x != NULL_RTX; x = PREV_INSN (x))
1738 {
1739 /* While walking over the insn chain, verify insns appear
1740 in only one basic block and initialize the BB_INFO array
1741 used by other passes. */
1742 if (bb_info[INSN_UID (x)] != NULL)
1743 {
1744 error ("insn %d is in multiple basic blocks (%d and %d)",
1745 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1746 err = 1;
1747 }
1748
1749 bb_info[INSN_UID (x)] = bb;
1750
1751 if (x == head)
1752 break;
1753 }
1754 if (!x)
1755 {
1756 error ("head insn %d for block %d not found in the insn stream",
1757 INSN_UID (head), bb->index);
1758 err = 1;
1759 }
1760
1761 last_head = x;
1762 }
1763
1764 /* Now check the basic blocks (boundaries etc.) */
1765 FOR_EACH_BB_REVERSE (bb)
1766 {
1767 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1768 edge e;
1769 rtx note;
1770
1771 if (INSN_P (bb->end)
1772 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1773 && bb->succ && bb->succ->succ_next
1774 && any_condjump_p (bb->end))
1775 {
1776 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1777 {
1778 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1779 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1780 err = 1;
1781 }
1782 }
1783 if (bb->count < 0)
1784 {
1785 error ("verify_flow_info: Wrong count of block %i %i",
1786 bb->index, (int)bb->count);
1787 err = 1;
1788 }
1789 if (bb->frequency < 0)
1790 {
1791 error ("verify_flow_info: Wrong frequency of block %i %i",
1792 bb->index, bb->frequency);
1793 err = 1;
1794 }
1795 for (e = bb->succ; e; e = e->succ_next)
1796 {
1797 if (last_visited [e->dest->index + 2] == bb)
1798 {
1799 error ("verify_flow_info: Duplicate edge %i->%i",
1800 e->src->index, e->dest->index);
1801 err = 1;
1802 }
1803 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1804 {
1805 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1806 e->src->index, e->dest->index, e->probability);
1807 err = 1;
1808 }
1809 if (e->count < 0)
1810 {
1811 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1812 e->src->index, e->dest->index, (int)e->count);
1813 err = 1;
1814 }
1815
1816 last_visited [e->dest->index + 2] = bb;
1817
1818 if (e->flags & EDGE_FALLTHRU)
1819 n_fallthru++;
1820
1821 if ((e->flags & ~EDGE_DFS_BACK) == 0)
1822 n_branch++;
1823
1824 if (e->flags & EDGE_ABNORMAL_CALL)
1825 n_call++;
1826
1827 if (e->flags & EDGE_EH)
1828 n_eh++;
1829 else if (e->flags & EDGE_ABNORMAL)
1830 n_abnormal++;
1831
1832 if ((e->flags & EDGE_FALLTHRU)
1833 && e->src != ENTRY_BLOCK_PTR
1834 && e->dest != EXIT_BLOCK_PTR)
1835 {
1836 rtx insn;
1837
1838 if (e->src->next_bb != e->dest)
1839 {
1840 error
1841 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1842 e->src->index, e->dest->index);
1843 err = 1;
1844 }
1845 else
1846 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1847 insn = NEXT_INSN (insn))
1848 if (GET_CODE (insn) == BARRIER
1849 #ifndef CASE_DROPS_THROUGH
1850 || INSN_P (insn)
1851 #else
1852 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1853 #endif
1854 )
1855 {
1856 error ("verify_flow_info: Incorrect fallthru %i->%i",
1857 e->src->index, e->dest->index);
1858 fatal_insn ("wrong insn in the fallthru edge", insn);
1859 err = 1;
1860 }
1861 }
1862
1863 if (e->src != bb)
1864 {
1865 error ("verify_flow_info: Basic block %d succ edge is corrupted",
1866 bb->index);
1867 fprintf (stderr, "Predecessor: ");
1868 dump_edge_info (stderr, e, 0);
1869 fprintf (stderr, "\nSuccessor: ");
1870 dump_edge_info (stderr, e, 1);
1871 fprintf (stderr, "\n");
1872 err = 1;
1873 }
1874
1875 edge_checksum[e->dest->index + 2] += (size_t) e;
1876 }
1877
1878 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1879 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1880 {
1881 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1882 err = 1;
1883 }
1884 if (n_branch
1885 && (GET_CODE (bb->end) != JUMP_INSN
1886 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1887 || any_condjump_p (bb->end)))))
1888 {
1889 error ("Too many outgoing branch edges from bb %i", bb->index);
1890 err = 1;
1891 }
1892 if (n_fallthru && any_uncondjump_p (bb->end))
1893 {
1894 error ("Fallthru edge after unconditional jump %i", bb->index);
1895 err = 1;
1896 }
1897 if (n_branch != 1 && any_uncondjump_p (bb->end))
1898 {
1899 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1900 err = 1;
1901 }
1902 if (n_branch != 1 && any_condjump_p (bb->end)
1903 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1904 {
1905 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1906 err = 1;
1907 }
1908 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1909 {
1910 error ("Call edges for non-call insn in bb %i", bb->index);
1911 err = 1;
1912 }
1913 if (n_abnormal
1914 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1915 && (GET_CODE (bb->end) != JUMP_INSN
1916 || any_condjump_p (bb->end)
1917 || any_uncondjump_p (bb->end)))
1918 {
1919 error ("Abnormal edges for no purpose in bb %i", bb->index);
1920 err = 1;
1921 }
1922
1923 if (!n_fallthru)
1924 {
1925 rtx insn;
1926
1927 /* Ensure existence of barrier in BB with no fallthru edges. */
1928 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1929 insn = NEXT_INSN (insn))
1930 if (!insn
1931 || (GET_CODE (insn) == NOTE
1932 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
1933 {
1934 error ("missing barrier after block %i", bb->index);
1935 err = 1;
1936 break;
1937 }
1938 }
1939
1940 for (e = bb->pred; e; e = e->pred_next)
1941 {
1942 if (e->dest != bb)
1943 {
1944 error ("basic block %d pred edge is corrupted", bb->index);
1945 fputs ("Predecessor: ", stderr);
1946 dump_edge_info (stderr, e, 0);
1947 fputs ("\nSuccessor: ", stderr);
1948 dump_edge_info (stderr, e, 1);
1949 fputc ('\n', stderr);
1950 err = 1;
1951 }
1952 edge_checksum[e->dest->index + 2] -= (size_t) e;
1953 }
1954
1955 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
1956 if (BLOCK_FOR_INSN (x) != bb)
1957 {
1958 debug_rtx (x);
1959 if (! BLOCK_FOR_INSN (x))
1960 error
1961 ("insn %d inside basic block %d but block_for_insn is NULL",
1962 INSN_UID (x), bb->index);
1963 else
1964 error
1965 ("insn %d inside basic block %d but block_for_insn is %i",
1966 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1967
1968 err = 1;
1969 }
1970
1971 /* OK, pointers are correct. Now check the header of the basic
1972 block. It ought to contain an optional CODE_LABEL followed
1973 by the NOTE_INSN_BASIC_BLOCK note. */
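/* For illustration (the UIDs are made up), a well-formed non-empty block
   typically begins like:

     (code_label 23 ...)                  ; optional
     (note 24 ... NOTE_INSN_BASIC_BLOCK)
     (insn 25 ...)                        ; first body insn

   which is what the checks below enforce. */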
1974 x = bb->head;
1975 if (GET_CODE (x) == CODE_LABEL)
1976 {
1977 if (bb->end == x)
1978 {
1979 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1980 bb->index);
1981 err = 1;
1982 }
1983
1984 x = NEXT_INSN (x);
1985 }
1986
1987 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1988 {
1989 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1990 bb->index);
1991 err = 1;
1992 }
1993
1994 if (bb->end == x)
1995 /* Do checks for empty blocks here. */
1996 ;
1997 else
1998 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1999 {
2000 if (NOTE_INSN_BASIC_BLOCK_P (x))
2001 {
2002 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2003 INSN_UID (x), bb->index);
2004 err = 1;
2005 }
2006
2007 if (x == bb->end)
2008 break;
2009
2010 if (GET_CODE (x) == JUMP_INSN
2011 || GET_CODE (x) == CODE_LABEL
2012 || GET_CODE (x) == BARRIER)
2013 {
2014 error ("in basic block %d:", bb->index);
2015 fatal_insn ("flow control insn inside a basic block", x);
2016 }
2017 }
2018 }
2019
2020 /* Complete edge checksumming for ENTRY and EXIT. */
2021 {
2022 edge e;
2023
2024 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
2025 edge_checksum[e->dest->index + 2] += (size_t) e;
2026
2027 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2028 edge_checksum[e->dest->index + 2] -= (size_t) e;
2029 }
2030
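/* Every edge's address was added once from its source block's successor
   list and subtracted once from its destination block's predecessor list;
   the + 2 bias makes room for the negative indices of the ENTRY and EXIT
   blocks. A nonzero residue therefore means some edge appears on one of
   the two lists but not on the other. */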
2031 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2032 if (edge_checksum[bb->index + 2])
2033 {
2034 error ("basic block %i edge lists are corrupted", bb->index);
2035 err = 1;
2036 }
2037
2038 num_bb_notes = 0;
2039 last_bb_seen = ENTRY_BLOCK_PTR;
2040
2041 for (x = rtx_first; x; x = NEXT_INSN (x))
2042 {
2043 if (NOTE_INSN_BASIC_BLOCK_P (x))
2044 {
2045 bb = NOTE_BASIC_BLOCK (x);
2046
2047 num_bb_notes++;
2048 if (bb != last_bb_seen->next_bb)
2049 internal_error ("basic blocks not numbered consecutively");
2050
2051 last_bb_seen = bb;
2052 }
2053
2054 if (!bb_info[INSN_UID (x)])
2055 {
2056 switch (GET_CODE (x))
2057 {
2058 case BARRIER:
2059 case NOTE:
2060 break;
2061
2062 case CODE_LABEL:
2063 /* An addr_vec is placed outside any basic block. */
2064 if (NEXT_INSN (x)
2065 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2066 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2067 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2068 x = NEXT_INSN (x);
2069
2070 /* But in any case, non-deletable labels can appear anywhere. */
2071 break;
2072
2073 default:
2074 fatal_insn ("insn outside basic block", x);
2075 }
2076 }
2077
2078 if (INSN_P (x)
2079 && GET_CODE (x) == JUMP_INSN
2080 && returnjump_p (x) && ! condjump_p (x)
2081 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2082 fatal_insn ("return not followed by barrier", x);
2083 }
2084
2085 if (num_bb_notes != n_basic_blocks)
2086 internal_error
2087 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2088 num_bb_notes, n_basic_blocks);
2089
2090 if (err)
2091 internal_error ("verify_flow_info failed");
2092
2093 /* Clean up. */
2094 free (bb_info);
2095 free (last_visited);
2096 free (edge_checksum);
2097 }
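/* Illustrative sketch only, kept disabled: verify_flow_info takes no
   arguments and aborts through internal_error on failure, so a pass that
   has just rewritten the insn chain would typically re-check the CFG in
   checking-enabled builds roughly as below. The real call sites live in
   the individual passes, not in this file, and example_recheck_cfg is a
   made-up name. */
#if 0
static void example_recheck_cfg PARAMS ((void));

static void
example_recheck_cfg ()
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif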
2098 \f
2099 /* Assume that the preceding pass has possibly eliminated jump instructions
2100 or converted the unconditional jumps. Eliminate the corresponding edges
2101 from the CFG. Return true if any edges were eliminated. */
2102
2103 bool
2104 purge_dead_edges (bb)
2105 basic_block bb;
2106 {
2107 edge e, next;
2108 rtx insn = bb->end, note;
2109 bool purged = false;
2110
2111 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2112 if (GET_CODE (insn) == INSN
2113 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2114 {
2115 rtx eqnote;
2116
2117 if (! may_trap_p (PATTERN (insn))
2118 || ((eqnote = find_reg_equal_equiv_note (insn))
2119 && ! may_trap_p (XEXP (eqnote, 0))))
2120 remove_note (insn, note);
2121 }
2122
2123 /* Clean up abnormal edges caused by exceptions or non-local gotos. */
2124 for (e = bb->succ; e; e = next)
2125 {
2126 next = e->succ_next;
2127 if (e->flags & EDGE_EH)
2128 {
2129 if (can_throw_internal (bb->end))
2130 continue;
2131 }
2132 else if (e->flags & EDGE_ABNORMAL_CALL)
2133 {
2134 if (GET_CODE (bb->end) == CALL_INSN
2135 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2136 || INTVAL (XEXP (note, 0)) >= 0))
2137 continue;
2138 }
2139 else
2140 continue;
2141
2142 remove_edge (e);
2143 bb->flags |= BB_DIRTY;
2144 purged = true;
2145 }
2146
2147 if (GET_CODE (insn) == JUMP_INSN)
2148 {
2149 rtx note;
2150 edge b,f;
2151
2152 /* We care only about conditional jumps, simplejumps and return jumps. */
2153 if (!any_condjump_p (insn)
2154 && !returnjump_p (insn)
2155 && !simplejump_p (insn))
2156 return purged;
2157
2158 /* Branch probability/prediction notes are defined only for
2159 condjumps. We've possibly turned the condjump into a simplejump. */
2160 if (simplejump_p (insn))
2161 {
2162 note = find_reg_note (insn, REG_BR_PROB, NULL);
2163 if (note)
2164 remove_note (insn, note);
2165 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2166 remove_note (insn, note);
2167 }
2168
2169 for (e = bb->succ; e; e = next)
2170 {
2171 next = e->succ_next;
2172
2173 /* Avoid letting abnormal flags leak from computed jumps turned
2174 into simplejumps. */
2175
2176 e->flags &= ~EDGE_ABNORMAL;
2177
2178 /* See if this edge is one we should keep. */
2179 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2180 /* A conditional jump can fall through into the next
2181 block, so we should keep the edge. */
2182 continue;
2183 else if (e->dest != EXIT_BLOCK_PTR
2184 && e->dest->head == JUMP_LABEL (insn))
2185 /* If the destination block is the target of the jump,
2186 keep the edge. */
2187 continue;
2188 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2189 /* If the destination block is the exit block, and this
2190 instruction is a return, then keep the edge. */
2191 continue;
2192 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2193 /* Keep the edges that correspond to exceptions thrown by
2194 this instruction. */
2195 continue;
2196
2197 /* We do not need this edge. */
2198 bb->flags |= BB_DIRTY;
2199 purged = true;
2200 remove_edge (e);
2201 }
2202
2203 if (!bb->succ || !purged)
2204 return purged;
2205
2206 if (rtl_dump_file)
2207 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
2208
2209 if (!optimize)
2210 return purged;
2211
2212 /* Redistribute probabilities. */
2213 if (!bb->succ->succ_next)
2214 {
2215 bb->succ->probability = REG_BR_PROB_BASE;
2216 bb->succ->count = bb->count;
2217 }
2218 else
2219 {
2220 note = find_reg_note (insn, REG_BR_PROB, NULL);
2221 if (!note)
2222 return purged;
2223
2224 b = BRANCH_EDGE (bb);
2225 f = FALLTHRU_EDGE (bb);
2226 b->probability = INTVAL (XEXP (note, 0));
2227 f->probability = REG_BR_PROB_BASE - b->probability;
2228 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2229 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
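/* For instance, with REG_BR_PROB_BASE at its usual value of 10000, a
   REG_BR_PROB note of 9000 gives the branch edge probability 9000 (90%)
   and leaves 1000 (10%) for the fallthru edge; the execution counts are
   split in the same ratio. */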
2230 }
2231
2232 return purged;
2233 }
2234
2235 /* If we don't see a jump insn, we don't know exactly why the block would
2236 have been broken at this point. Look for a simple, non-fallthru edge,
2237 as these are only created by conditional branches. If we find such an
2238 edge we know that there used to be a jump here and can then safely
2239 remove all non-fallthru edges. */
2240 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2241 e = e->succ_next)
2242 ;
2243
2244 if (!e)
2245 return purged;
2246
2247 for (e = bb->succ; e; e = next)
2248 {
2249 next = e->succ_next;
2250 if (!(e->flags & EDGE_FALLTHRU))
2251 {
2252 bb->flags |= BB_DIRTY;
2253 remove_edge (e);
2254 purged = true;
2255 }
2256 }
2257
2258 if (!bb->succ || bb->succ->succ_next)
2259 abort ();
2260
2261 bb->succ->probability = REG_BR_PROB_BASE;
2262 bb->succ->count = bb->count;
2263
2264 if (rtl_dump_file)
2265 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
2266 bb->index);
2267 return purged;
2268 }
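/* Illustrative sketch only, kept disabled: a pass that simplifies or
   deletes the jump ending a block would typically follow up with
   purge_dead_edges on that block so that the outgoing edges again match
   the insn stream. example_after_jump_change is a made-up name and the
   transformation itself is only hinted at. */
#if 0
static void example_after_jump_change PARAMS ((basic_block));

static void
example_after_jump_change (bb)
     basic_block bb;
{
  /* ... something here simplified or removed the jump at bb->end ... */

  if (purge_dead_edges (bb))
    {
      /* At least one outgoing edge was removed; when optimizing, the
         probabilities of the surviving edges were redistributed above. */
    }
}
#endif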
2269
2270 /* Search all basic blocks for potentially dead edges and purge them. Return
2271 true if any edge was eliminated. */
2272
2273 bool
2274 purge_all_dead_edges (update_life_p)
2275 int update_life_p;
2276 {
2277 bool purged = false;
2278 sbitmap blocks = 0;
2279 basic_block bb;
2280
2281 if (update_life_p)
2282 {
2283 blocks = sbitmap_alloc (last_basic_block);
2284 sbitmap_zero (blocks);
2285 }
2286
2287 FOR_EACH_BB (bb)
2288 {
2289 bool purged_here = purge_dead_edges (bb);
2290
2291 purged |= purged_here;
2292 if (purged_here && update_life_p)
2293 SET_BIT (blocks, bb->index);
2294 }
2295
2296 if (update_life_p && purged)
2297 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2298 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2299 | PROP_KILL_DEAD_CODE);
2300
2301 if (update_life_p)
2302 sbitmap_free (blocks);
2303 return purged;
2304 }
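/* Illustrative sketch only, kept disabled: a transformation that may have
   invalidated edges in many blocks at once (constant propagation, for
   instance) would typically finish with a whole-function purge; a nonzero
   argument requests that life information be updated for the blocks whose
   edges changed. example_finish_pass is a made-up name. */
#if 0
static void example_finish_pass PARAMS ((void));

static void
example_finish_pass ()
{
  if (purge_all_dead_edges (1))
    {
      /* Some edges were removed and life info was brought back in sync. */
    }
}
#endif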