]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cfgrtl.c
basic-block.h (flow_delete_block_noexpunge): Kill.
[thirdparty/gcc.git] / gcc / cfgrtl.c
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
24
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, rtl_delete_block,rtl_split_block,
30 merge_blocks_nomove
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - CFG updating after constant propagation
39 purge_dead_edges, purge_all_dead_edges */
40 \f
41 #include "config.h"
42 #include "system.h"
43 #include "coretypes.h"
44 #include "tm.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "output.h"
52 #include "function.h"
53 #include "except.h"
54 #include "toplev.h"
55 #include "tm_p.h"
56 #include "obstack.h"
57 #include "insn-config.h"
58 #include "cfglayout.h"
59
60 /* Stubs in case we don't have a return insn. */
61 #ifndef HAVE_return
62 #define HAVE_return 0
63 #define gen_return() NULL_RTX
64 #endif
65
66 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
67 /* ??? Should probably be using LABEL_NUSES instead. It would take a
68 bit of surgery to be able to use or co-opt the routines in jump. */
69 rtx label_value_list;
70 rtx tail_recursion_label_list;
71
72 static int can_delete_note_p PARAMS ((rtx));
73 static int can_delete_label_p PARAMS ((rtx));
74 static void commit_one_edge_insertion PARAMS ((edge, int));
75 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
76 static rtx last_loop_beg_note PARAMS ((rtx));
77 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
78 basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
79 static basic_block rtl_split_edge PARAMS ((edge));
80 static int rtl_verify_flow_info PARAMS ((void));
81 static edge cfg_layout_split_block PARAMS ((basic_block, void *));
82 static bool cfg_layout_redirect_edge_and_branch PARAMS ((edge, basic_block));
83 static basic_block cfg_layout_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
84 static void cfg_layout_delete_block PARAMS ((basic_block));
85 static void rtl_delete_block PARAMS ((basic_block));
86 static basic_block rtl_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
87 static bool rtl_redirect_edge_and_branch PARAMS ((edge, basic_block));
88 static edge rtl_split_block PARAMS ((basic_block, void *));
89 static void rtl_dump_bb PARAMS ((basic_block, FILE *));
90 static int rtl_verify_flow_info_1 PARAMS ((void));
91 \f
92 /* Return true if NOTE is not one of the ones that must be kept paired,
93 so that we may simply delete it. */
94
95 static int
96 can_delete_note_p (note)
97 rtx note;
98 {
99 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
100 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
101 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
102 }
103
104 /* True if a given label can be deleted. */
105
106 static int
107 can_delete_label_p (label)
108 rtx label;
109 {
110 return (!LABEL_PRESERVE_P (label)
111 /* User declared labels must be preserved. */
112 && LABEL_NAME (label) == 0
113 && !in_expr_list_p (forced_labels, label)
114 && !in_expr_list_p (label_value_list, label));
115 }
116
/* Delete INSN by patching it out of the insn chain.  Return the insn that
   followed it.  Maintains label use counts, and demotes labels that cannot
   be physically removed to NOTE_INSN_DELETED_LABEL notes instead.  */

rtx
delete_insn (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  /* The label survives as a note; keep its name in the note's
	     source-file slot so it can still be emitted.  */
	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      if (INSN_DELETED_P (insn))
	abort ();
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	   && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
    LABEL_NUSES (XEXP (note, 0))--;

  /* A jump table going away releases one use of each target label.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (GET_CODE (label) != NOTE)
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
189
190 /* Like delete_insn but also purge dead edges from BB. */
191 rtx
192 delete_insn_and_edges (insn)
193 rtx insn;
194 {
195 rtx x;
196 bool purge = false;
197
198 if (INSN_P (insn)
199 && BLOCK_FOR_INSN (insn)
200 && BLOCK_FOR_INSN (insn)->end == insn)
201 purge = true;
202 x = delete_insn (insn);
203 if (purge)
204 purge_dead_edges (BLOCK_FOR_INSN (insn));
205 return x;
206 }
207
208 /* Unlink a chain of insns between START and FINISH, leaving notes
209 that must be paired. */
210
211 void
212 delete_insn_chain (start, finish)
213 rtx start, finish;
214 {
215 rtx next;
216
217 /* Unchain the insns one by one. It would be quicker to delete all of these
218 with a single unchaining, rather than one at a time, but we need to keep
219 the NOTE's. */
220 while (1)
221 {
222 next = NEXT_INSN (start);
223 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
224 ;
225 else
226 next = delete_insn (start);
227
228 if (start == finish)
229 break;
230 start = next;
231 }
232 }
233
234 /* Like delete_insn but also purge dead edges from BB. */
235 void
236 delete_insn_chain_and_edges (first, last)
237 rtx first, last;
238 {
239 bool purge = false;
240
241 if (INSN_P (last)
242 && BLOCK_FOR_INSN (last)
243 && BLOCK_FOR_INSN (last)->end == last)
244 purge = true;
245 delete_insn_chain (first, last);
246 if (purge)
247 purge_dead_edges (BLOCK_FOR_INSN (last));
248 }
249 \f
250 /* Create a new basic block consisting of the instructions between HEAD and END
251 inclusive. This function is designed to allow fast BB construction - reuses
252 the note and basic block struct in BB_NOTE, if any and do not grow
253 BASIC_BLOCK chain and should be used directly only by CFG construction code.
254 END can be NULL in to create new empty basic block before HEAD. Both END
255 and HEAD can be NULL to create basic block at the end of INSN chain.
256 AFTER is the basic block we should be put after. */
257
258 basic_block
259 create_basic_block_structure (head, end, bb_note, after)
260 rtx head, end, bb_note;
261 basic_block after;
262 {
263 basic_block bb;
264
265 if (bb_note
266 && ! RTX_INTEGRATED_P (bb_note)
267 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
268 && bb->aux == NULL)
269 {
270 /* If we found an existing note, thread it back onto the chain. */
271
272 rtx after;
273
274 if (GET_CODE (head) == CODE_LABEL)
275 after = head;
276 else
277 {
278 after = PREV_INSN (head);
279 head = bb_note;
280 }
281
282 if (after != bb_note && NEXT_INSN (after) != bb_note)
283 reorder_insns_nobb (bb_note, bb_note, after);
284 }
285 else
286 {
287 /* Otherwise we must create a note and a basic block structure. */
288
289 bb = alloc_block ();
290
291 if (!head && !end)
292 head = end = bb_note
293 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
294 else if (GET_CODE (head) == CODE_LABEL && end)
295 {
296 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
297 if (head == end)
298 end = bb_note;
299 }
300 else
301 {
302 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
303 head = bb_note;
304 if (!end)
305 end = head;
306 }
307
308 NOTE_BASIC_BLOCK (bb_note) = bb;
309 }
310
311 /* Always include the bb note in the block. */
312 if (NEXT_INSN (end) == bb_note)
313 end = bb_note;
314
315 bb->head = head;
316 bb->end = end;
317 bb->index = last_basic_block++;
318 bb->flags = BB_NEW;
319 link_block (bb, after);
320 BASIC_BLOCK (bb->index) = bb;
321 update_bb_for_insn (bb);
322
323 /* Tag the block so that we know it has been used when considering
324 other basic block notes. */
325 bb->aux = bb;
326
327 return bb;
328 }
329
330 /* Create new basic block consisting of instructions in between HEAD and END
331 and place it to the BB chain after block AFTER. END can be NULL in to
332 create new empty basic block before HEAD. Both END and HEAD can be NULL to
333 create basic block at the end of INSN chain. */
334
335 basic_block
336 create_basic_block (head, end, after)
337 rtx head, end;
338 basic_block after;
339 {
340 basic_block bb;
341
342 /* Place the new block just after the end. */
343 VARRAY_GROW (basic_block_info, last_basic_block+1);
344
345 n_basic_blocks++;
346
347 bb = create_basic_block_structure (head, end, NULL, after);
348 bb->aux = NULL;
349 return bb;
350 }
351 \f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.
   (An older version of this comment claimed a nonzero return for deleted
   exception handlers; the function is void.)  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (b)
     basic_block b;
{
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
     hanging before the block.  */

  for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	break;
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
	  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
    }

  insn = b->head;

  never_reached_warning (insn, b->end);

  if (GET_CODE (insn) == CODE_LABEL)
    maybe_remove_eh_handler (insn);

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  Clear the head first so no one
     looks at the block's insns while they are being removed.  */
  b->head = NULL;
  delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  while (b->pred != NULL)
    remove_edge (b->pred);
  while (b->succ != NULL)
    remove_edge (b->succ);

  b->pred = NULL;
  b->succ = NULL;

  /* Remove the basic block from the array.  */
  expunge_block (b);
}
419 \f
420 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
421
422 void
423 compute_bb_for_insn ()
424 {
425 basic_block bb;
426
427 FOR_EACH_BB (bb)
428 {
429 rtx end = bb->end;
430 rtx insn;
431
432 for (insn = bb->head; ; insn = NEXT_INSN (insn))
433 {
434 BLOCK_FOR_INSN (insn) = bb;
435 if (insn == end)
436 break;
437 }
438 }
439 }
440
441 /* Release the basic_block_for_insn array. */
442
443 void
444 free_bb_for_insn ()
445 {
446 rtx insn;
447 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
448 if (GET_CODE (insn) != BARRIER)
449 BLOCK_FOR_INSN (insn) = NULL;
450 }
451
452 /* Update insns block within BB. */
453
454 void
455 update_bb_for_insn (bb)
456 basic_block bb;
457 {
458 rtx insn;
459
460 for (insn = bb->head; ; insn = NEXT_INSN (insn))
461 {
462 if (GET_CODE (insn) != BARRIER)
463 set_block_for_insn (insn, bb);
464 if (insn == bb->end)
465 break;
466 }
467 }
468 \f
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

static edge
rtl_split_block (bb, insnp)
     basic_block bb;
     void *insnp;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx insn = insnp;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new basic block from the insns after INSN; BB itself now
     ends at INSN.  */
  new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;
  bb->end = insn;

  /* Redirect the outgoing edges: they now leave NEW_BB.  */
  new_bb->succ = bb->succ;
  bb->succ = NULL;
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
	 liveness precisely.  */
      if (reload_completed)
	{
	  bb->flags |= BB_DIRTY;
	  new_bb->flags |= BB_DIRTY;
	}
#endif
    }

  return new_edge;
}
531
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  rtx b_head = b->head, b_end = b->end, a_end = a->end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;
  edge e;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      /* Find the insn preceding the jump, skipping plain notes but
	 stopping at the bb note or the block head.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (GET_CODE (prev) != NOTE
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == a->head)
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  /* Normally there should only be one successor of A and that is B, but
     partway through the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;
  a->flags |= b->flags;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;
  a->global_live_at_end = b->global_live_at_end;

  expunge_block (b);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
	set_block_for_insn (x, a);

      set_block_for_insn (b_end, a);

      a_end = b_end;
    }

  a->end = a_end;
}
639 \f
640 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
641 exist. */
642
643 rtx
644 block_label (block)
645 basic_block block;
646 {
647 if (block == EXIT_BLOCK_PTR)
648 return NULL_RTX;
649
650 if (GET_CODE (block->head) != CODE_LABEL)
651 {
652 block->head = emit_label_before (gen_label_rtx (), block->head);
653 }
654
655 return block->head;
656 }
657
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  edge tmp;
  rtx set;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;

  if (tmp || !onlyjump_p (insn))
    return false;
  if ((!optimize || flow2_completed) && tablejump_p (insn, NULL, NULL))
    return false;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      delete_insn_chain (kill_from, PREV_INSN (target->head));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* Only the exit block is a legitimate redirection failure.  */
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return false;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after (gen_jump (target_label), insn);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));


      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.
	 NOTE(review): INSN was already unlinked by the delete_insn_chain
	 just above; tablejump_p inspects its still-allocated pattern here.
	 Confirm this ordering is intentional.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table);

      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    delete_insn (e->src->end);

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}
778
779 /* Return last loop_beg note appearing after INSN, before start of next
780 basic block. Return INSN if there are no such notes.
781
782 When emitting jump to redirect a fallthru edge, it should always appear
783 after the LOOP_BEG notes, as loop optimizer expect loop to either start by
784 fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
785 test. */
786
787 static rtx
788 last_loop_beg_note (insn)
789 rtx insn;
790 {
791 rtx last = insn;
792
793 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
794 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
795 insn = NEXT_INSN (insn))
796 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
797 last = insn;
798
799 return last;
800 }
801
/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equal to
   TARGET; then it should try the simplifications and do nothing if none
   is possible.

   Return true if the transformation succeeded.  We still return false in
   case E already pointed to TARGET and we did not manage to simplify the
   instruction stream.  */

static bool
rtl_redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  /* Abnormal call and EH edges cannot be retargeted.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;

  /* Do this fast path late, as we want above code to simplify for cases
     where called on single edge leaving basic block containing nontrivial
     jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  else if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Rewrite every table entry that still targets OLD_LABEL.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);

  return true;
}
909
910 /* Like force_nonfallthru below, but additionally performs redirection
911 Used by redirect_edge_and_branch_force. */
912
913 basic_block
914 force_nonfallthru_and_redirect (e, target)
915 edge e;
916 basic_block target;
917 {
918 basic_block jump_block, new_bb = NULL, src = e->src;
919 rtx note;
920 edge new_edge;
921 int abnormal_edge_flags = 0;
922
923 /* In the case the last instruction is conditional jump to the next
924 instruction, first redirect the jump itself and then continue
925 by creating an basic block afterwards to redirect fallthru edge. */
926 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
927 && any_condjump_p (e->src->end)
928 /* When called from cfglayout, fallthru edges do not
929 neccessarily go to the next block. */
930 && e->src->next_bb == e->dest
931 && JUMP_LABEL (e->src->end) == e->dest->head)
932 {
933 rtx note;
934 edge b = unchecked_make_edge (e->src, target, 0);
935
936 if (!redirect_jump (e->src->end, block_label (target), 0))
937 abort ();
938 note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
939 if (note)
940 {
941 int prob = INTVAL (XEXP (note, 0));
942
943 b->probability = prob;
944 b->count = e->count * prob / REG_BR_PROB_BASE;
945 e->probability -= e->probability;
946 e->count -= b->count;
947 if (e->probability < 0)
948 e->probability = 0;
949 if (e->count < 0)
950 e->count = 0;
951 }
952 }
953
954 if (e->flags & EDGE_ABNORMAL)
955 {
956 /* Irritating special case - fallthru edge to the same block as abnormal
957 edge.
958 We can't redirect abnormal edge, but we still can split the fallthru
959 one and create separate abnormal edge to original destination.
960 This allows bb-reorder to make such edge non-fallthru. */
961 if (e->dest != target)
962 abort ();
963 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
964 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
965 }
966 else if (!(e->flags & EDGE_FALLTHRU))
967 abort ();
968 else if (e->src == ENTRY_BLOCK_PTR)
969 {
970 /* We can't redirect the entry block. Create an empty block at the
971 start of the function which we use to add the new jump. */
972 edge *pe1;
973 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
974
975 /* Change the existing edge's source to be the new block, and add
976 a new edge from the entry block to the new block. */
977 e->src = bb;
978 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
979 if (*pe1 == e)
980 {
981 *pe1 = e->succ_next;
982 break;
983 }
984 e->succ_next = 0;
985 bb->succ = e;
986 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
987 }
988
989 if (e->src->succ->succ_next || abnormal_edge_flags)
990 {
991 /* Create the new structures. */
992
993 /* Position the new block correctly relative to loop notes. */
994 note = last_loop_beg_note (e->src->end);
995 note = NEXT_INSN (note);
996
997 /* ... and ADDR_VECs. */
998 if (note != NULL
999 && GET_CODE (note) == CODE_LABEL
1000 && NEXT_INSN (note)
1001 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
1002 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
1003 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
1004 note = NEXT_INSN (NEXT_INSN (note));
1005
1006 jump_block = create_basic_block (note, NULL, e->src);
1007 jump_block->count = e->count;
1008 jump_block->frequency = EDGE_FREQUENCY (e);
1009 jump_block->loop_depth = target->loop_depth;
1010
1011 if (target->global_live_at_start)
1012 {
1013 jump_block->global_live_at_start
1014 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1015 jump_block->global_live_at_end
1016 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1017 COPY_REG_SET (jump_block->global_live_at_start,
1018 target->global_live_at_start);
1019 COPY_REG_SET (jump_block->global_live_at_end,
1020 target->global_live_at_start);
1021 }
1022
1023 /* Wire edge in. */
1024 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1025 new_edge->probability = e->probability;
1026 new_edge->count = e->count;
1027
1028 /* Redirect old edge. */
1029 redirect_edge_pred (e, jump_block);
1030 e->probability = REG_BR_PROB_BASE;
1031
1032 new_bb = jump_block;
1033 }
1034 else
1035 jump_block = e->src;
1036
1037 e->flags &= ~EDGE_FALLTHRU;
1038 if (target == EXIT_BLOCK_PTR)
1039 {
1040 if (HAVE_return)
1041 emit_jump_insn_after (gen_return (), jump_block->end);
1042 else
1043 abort ();
1044 }
1045 else
1046 {
1047 rtx label = block_label (target);
1048 emit_jump_insn_after (gen_jump (label), jump_block->end);
1049 JUMP_LABEL (jump_block->end) = label;
1050 LABEL_NUSES (label)++;
1051 }
1052
1053 emit_barrier_after (jump_block->end);
1054 redirect_edge_succ_nodup (e, target);
1055
1056 if (abnormal_edge_flags)
1057 make_edge (src, target, abnormal_edge_flags);
1058
1059 return new_bb;
1060 }
1061
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

basic_block
force_nonfallthru (e)
     edge e;
{
  /* Forcing without changing the destination is just the redirect-to-self
     case of the combined helper.  */
  return force_nonfallthru_and_redirect (e, e->dest);
}
1072
1073 /* Redirect edge even at the expense of creating new jump insn or
1074 basic block. Return new basic block if created, NULL otherwise.
1075 Abort if conversion is impossible. */
1076
1077 static basic_block
1078 rtl_redirect_edge_and_branch_force (e, target)
1079 edge e;
1080 basic_block target;
1081 {
1082 if (redirect_edge_and_branch (e, target)
1083 || e->dest == target)
1084 return NULL;
1085
1086 /* In case the edge redirection failed, try to force it to be non-fallthru
1087 and redirect newly created simplejump. */
1088 return force_nonfallthru_and_redirect (e, target);
1089 }
1090
/* The given edge E should potentially be a fallthru edge from block B
   to its layout successor C.  If that is in fact true, delete the jump
   at the end of B and the barriers that are in the way, then mark E
   as a real fallthru edge.  */

void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastized and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  /* If any real insn sits between B's end and C's head, B does not
     actually fall through to C -- bail out without changing anything.  */
  for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
        q = PREV_INSN (q);
#endif

      /* Step back over the jump itself; everything after Q up to C's
         head will be unlinked below.  */
      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
         the potential of changing the code between -g and not -g.  */
      while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
        q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (c->head))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
1145
1146 /* Fix up edges that now fall through, or rather should now fall through
1147 but previously required a jump around now deleted blocks. Simplify
1148 the search by only examining blocks numerically adjacent, since this
1149 is how find_basic_blocks created them. */
1150
1151 void
1152 tidy_fallthru_edges ()
1153 {
1154 basic_block b, c;
1155
1156 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1157 return;
1158
1159 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1160 {
1161 edge s;
1162
1163 c = b->next_bb;
1164
1165 /* We care about simple conditional or unconditional jumps with
1166 a single successor.
1167
1168 If we had a conditional branch to the next instruction when
1169 find_basic_blocks was called, then there will only be one
1170 out edge for the block which ended with the conditional
1171 branch (since we do not create duplicate edges).
1172
1173 Furthermore, the edge will be marked as a fallthru because we
1174 merge the flags for the duplicate edges. So we do not want to
1175 check that the edge is not a FALLTHRU edge. */
1176
1177 if ((s = b->succ) != NULL
1178 && ! (s->flags & EDGE_COMPLEX)
1179 && s->succ_next == NULL
1180 && s->dest == c
1181 /* If the jump insn has side effects, we can't tidy the edge. */
1182 && (GET_CODE (b->end) != JUMP_INSN
1183 || onlyjump_p (b->end)))
1184 tidy_fallthru_edge (s, b, c);
1185 }
1186 }
1187 \f
1188 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1189 is back edge of syntactic loop. */
1190
1191 static bool
1192 back_edge_of_syntactic_loop_p (bb1, bb2)
1193 basic_block bb1, bb2;
1194 {
1195 rtx insn;
1196 int count = 0;
1197 basic_block bb;
1198
1199 if (bb1 == bb2)
1200 return true;
1201
1202 /* ??? Could we guarantee that bb indices are monotone, so that we could
1203 just compare them? */
1204 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1205 continue;
1206
1207 if (!bb)
1208 return false;
1209
1210 for (insn = bb1->end; insn != bb2->head && count >= 0;
1211 insn = NEXT_INSN (insn))
1212 if (GET_CODE (insn) == NOTE)
1213 {
1214 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1215 count++;
1216 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1217 count--;
1218 }
1219
1220 return count >= 0;
1221 }
1222
1223 /* Split a (typically critical) edge. Return the new block.
1224 Abort on abnormal edges.
1225
1226 ??? The code generally expects to be called on critical edges.
1227 The case of a block ending in an unconditional jump to a
1228 block with multiple predecessors is not handled optimally. */
1229
basic_block
rtl_split_edge (edge_in)
     edge edge_in;
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors: inserting our block there
     would break such a predecessor, so give it an explicit jump first.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;

      for (e = edge_in->dest->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e)
        force_nonfallthru (e);
    }

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

                  E
                  |
                  0
                 / \
             +->1-->2--->E
             |  |
             +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */

  if (edge_in->dest != EXIT_BLOCK_PTR
      && PREV_INSN (edge_in->dest->head)
      && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
      && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
          == NOTE_INSN_LOOP_BEG)
      && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
    before = PREV_INSN (edge_in->dest->head);
  else if (edge_in->dest != EXIT_BLOCK_PTR)
    before = edge_in->dest->head;
  else
    before = NULL_RTX;

  /* The new block inherits the execution count and frequency of the
     edge being split.  */
  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
  bb->count = edge_in->count;
  bb->frequency = EDGE_FREQUENCY (edge_in);

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start,
                    edge_in->dest->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end,
                    edge_in->dest->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      if (!redirect_edge_and_branch (edge_in, bb))
        abort ();
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}
1314
1315 /* Queue instructions for insertion on an edge between two basic blocks.
1316 The new instructions and basic blocks (if any) will not appear in the
1317 CFG until commit_edge_insertions is called. */
1318
1319 void
1320 insert_insn_on_edge (pattern, e)
1321 rtx pattern;
1322 edge e;
1323 {
1324 /* We cannot insert instructions on an abnormal critical edge.
1325 It will be easier to find the culprit if we die now. */
1326 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1327 abort ();
1328
1329 if (e->insns == NULL_RTX)
1330 start_sequence ();
1331 else
1332 push_to_sequence (e->insns);
1333
1334 emit_insn (pattern);
1335
1336 e->insns = get_insns ();
1337 end_sequence ();
1338 }
1339
/* Update the CFG for the instructions queued on edge E.  When
   WATCH_CALLS is nonzero, take care not to insert the code between a
   call at the end of E->src and the following insns that store its
   return value.  */

static void
commit_one_edge_insertion (e, watch_calls)
     edge e;
     int watch_calls;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Special case -- avoid inserting code between call and storing
     its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
      && e->src != ENTRY_BLOCK_PTR
      && GET_CODE (e->src->end) == CALL_INSN)
    {
      rtx next = next_nonnote_insn (e->src->end);

      after = e->dest->head;
      /* The first insn after the call may be a stack pop, skip it.  */
      while (next
             && keep_with_call_p (next))
        {
          after = next;
          next = next_nonnote_insn (next);
        }
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
         one predecessor, insert there.  Except for the exit block.  */
      if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
        {
          bb = e->dest;

          /* Get the location correct wrt a code label, and "nice" wrt
             a basic block note, and before everything else.  */
          tmp = bb->head;
          if (GET_CODE (tmp) == CODE_LABEL)
            tmp = NEXT_INSN (tmp);
          if (NOTE_INSN_BASIC_BLOCK_P (tmp))
            tmp = NEXT_INSN (tmp);
          if (tmp == bb->head)
            before = tmp;
          else if (tmp)
            after = PREV_INSN (tmp);
          else
            after = get_last_insn ();
        }

      /* If the source has one successor and the edge is not abnormal,
         insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
               && e->src->succ->succ_next == NULL
               && e->src != ENTRY_BLOCK_PTR)
        {
          bb = e->src;

          /* It is possible to have a non-simple jump here.  Consider a target
             where some forms of unconditional jumps clobber a register.  This
             happens on the fr30 for example.

             We know this block has a single successor, so we can just emit
             the queued insns before the jump.  */
          if (GET_CODE (bb->end) == JUMP_INSN)
            /* Also skip backwards over any LOOP_BEG notes that directly
               precede the jump, so the inserted code lands outside them.  */
            for (before = bb->end;
                 GET_CODE (PREV_INSN (before)) == NOTE
                 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
                 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
              ;
          else
            {
              /* We'd better be fallthru, or we've lost track of what's what.  */
              if ((e->flags & EDGE_FALLTHRU) == 0)
                abort ();

              after = bb->end;
            }
        }
      /* Otherwise we must split the edge.  */
      else
        {
          bb = split_edge (e);
          after = bb->end;
        }
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
         This is not currently a problem because this only happens
         for the (single) epilogue, which already has a fallthru edge
         to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
          || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
        abort ();

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
        delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();

  /* Mark the basic block for find_sub_basic_blocks.  */
  bb->aux = &bb->aux;
}
1466
1467 /* Update the CFG for all queued instructions. */
1468
1469 void
1470 commit_edge_insertions ()
1471 {
1472 basic_block bb;
1473 sbitmap blocks;
1474 bool changed = false;
1475
1476 #ifdef ENABLE_CHECKING
1477 verify_flow_info ();
1478 #endif
1479
1480 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1481 {
1482 edge e, next;
1483
1484 for (e = bb->succ; e; e = next)
1485 {
1486 next = e->succ_next;
1487 if (e->insns)
1488 {
1489 changed = true;
1490 commit_one_edge_insertion (e, false);
1491 }
1492 }
1493 }
1494
1495 if (!changed)
1496 return;
1497
1498 blocks = sbitmap_alloc (last_basic_block);
1499 sbitmap_zero (blocks);
1500 FOR_EACH_BB (bb)
1501 if (bb->aux)
1502 {
1503 SET_BIT (blocks, bb->index);
1504 /* Check for forgotten bb->aux values before commit_edge_insertions
1505 call. */
1506 if (bb->aux != &bb->aux)
1507 abort ();
1508 bb->aux = NULL;
1509 }
1510 find_many_sub_basic_blocks (blocks);
1511 sbitmap_free (blocks);
1512 }
1513 \f
1514 /* Update the CFG for all queued instructions, taking special care of inserting
1515 code on edges between call and storing its return value. */
1516
1517 void
1518 commit_edge_insertions_watch_calls ()
1519 {
1520 basic_block bb;
1521 sbitmap blocks;
1522 bool changed = false;
1523
1524 #ifdef ENABLE_CHECKING
1525 verify_flow_info ();
1526 #endif
1527
1528 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1529 {
1530 edge e, next;
1531
1532 for (e = bb->succ; e; e = next)
1533 {
1534 next = e->succ_next;
1535 if (e->insns)
1536 {
1537 changed = true;
1538 commit_one_edge_insertion (e, true);
1539 }
1540 }
1541 }
1542
1543 if (!changed)
1544 return;
1545
1546 blocks = sbitmap_alloc (last_basic_block);
1547 sbitmap_zero (blocks);
1548 FOR_EACH_BB (bb)
1549 if (bb->aux)
1550 {
1551 SET_BIT (blocks, bb->index);
1552 /* Check for forgotten bb->aux values before commit_edge_insertions
1553 call. */
1554 if (bb->aux != &bb->aux)
1555 abort ();
1556 bb->aux = NULL;
1557 }
1558 find_many_sub_basic_blocks (blocks);
1559 sbitmap_free (blocks);
1560 }
1561 \f
1562 /* Print out one basic block with live information at start and end. */
1563
1564 static void
1565 rtl_dump_bb (bb, outf)
1566 basic_block bb;
1567 FILE *outf;
1568 {
1569 rtx insn;
1570 rtx last;
1571
1572 fputs (";; Registers live at start:", outf);
1573 dump_regset (bb->global_live_at_start, outf);
1574 putc ('\n', outf);
1575
1576 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1577 insn = NEXT_INSN (insn))
1578 print_rtl_single (outf, insn);
1579
1580 fputs (";; Registers live at end:", outf);
1581 dump_regset (bb->global_live_at_end, outf);
1582 putc ('\n', outf);
1583 }
1584 \f
1585 /* Like print_rtl, but also print out live information for the start of each
1586 basic block. */
1587
void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: the block starting/ending at each insn, and
         how many blocks each insn belongs to.  */
      basic_block *start
        = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
      basic_block *end
        = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
      enum bb_state *in_bb_p
        = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));

      basic_block bb;

      /* First pass: record block boundaries and membership for every
         insn inside a basic block.  */
      FOR_EACH_BB_REVERSE (bb)
        {
          rtx x;

          start[INSN_UID (bb->head)] = bb;
          end[INSN_UID (bb->end)] = bb;
          for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
            {
              enum bb_state state = IN_MULTIPLE_BB;

              if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
                state = IN_ONE_BB;
              in_bb_p[INSN_UID (x)] = state;

              if (x == bb->end)
                break;
            }
        }

      /* Second pass: print each insn, annotated with the block
         boundaries and membership computed above.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
        {
          int did_output;

          if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; Start of basic block %d, registers live:",
                       bb->index);
              dump_regset (bb->global_live_at_start, outf);
              putc ('\n', outf);
            }

          if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
              && GET_CODE (tmp_rtx) != NOTE
              && GET_CODE (tmp_rtx) != BARRIER)
            fprintf (outf, ";; Insn is not within a basic block\n");
          else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
            fprintf (outf, ";; Insn is in multiple basic blocks\n");

          did_output = print_rtl_single (outf, tmp_rtx);

          if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; End of basic block %d, registers live:\n",
                       bb->index);
              dump_regset (bb->global_live_at_end, outf);
              putc ('\n', outf);
            }

          if (did_output)
            putc ('\n', outf);
        }

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
           tmp_rtx = XEXP (tmp_rtx, 1))
        print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1675 \f
1676 void
1677 update_br_prob_note (bb)
1678 basic_block bb;
1679 {
1680 rtx note;
1681 if (GET_CODE (bb->end) != JUMP_INSN)
1682 return;
1683 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1684 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1685 return;
1686 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1687 }
1688 \f
1689 /* Verify the CFG and RTL consistency common for both underlying RTL and
1690 cfglayout RTL.
1691
1692 Currently it does following checks:
1693
1694 - test head/end pointers
1695 - overlapping of basic blocks
1696 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1697 - tails of basic blocks (ensure that boundary is necessary)
1698 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1699 and NOTE_INSN_BASIC_BLOCK
1700
1701 In future it can be extended check a lot of other stuff as well
1702 (reachability of basic blocks, life information, etc. etc.). */
static int
rtl_verify_flow_info_1 ()
{
  const int max_uid = get_max_uid ();
  rtx last_head = get_last_insn ();
  /* Per-insn-UID map of the basic block each insn belongs to; used to
     detect insns appearing in more than one block.  */
  basic_block *bb_info;
  rtx x;
  int err = 0;
  basic_block bb, last_bb_seen;

  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));

  /* Check bb chain & numbers.  */
  last_bb_seen = ENTRY_BLOCK_PTR;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = bb->head;
      rtx end = bb->end;

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
        if (x == end)
          break;

      if (!x)
        {
          error ("end insn %d for block %d not found in the insn stream",
                 INSN_UID (end), bb->index);
          err = 1;
        }

      /* Work backwards from the end to the head of the basic block
         to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
        {
          /* While walking over the insn chain, verify insns appear
             in only one basic block and initialize the BB_INFO array
             used by other passes.  */
          if (bb_info[INSN_UID (x)] != NULL)
            {
              error ("insn %d is in multiple basic blocks (%d and %d)",
                     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
              err = 1;
            }

          bb_info[INSN_UID (x)] = bb;

          if (x == head)
            break;
        }
      if (!x)
        {
          error ("head insn %d for block %d not found in the insn stream",
                 INSN_UID (head), bb->index);
          err = 1;
        }

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e;
      rtx note;

      /* The REG_BR_PROB note on a conditional jump must agree with the
         probability recorded on the branch edge.  */
      if (INSN_P (bb->end)
          && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
          && bb->succ && bb->succ->succ_next
          && any_condjump_p (bb->end))
        {
          if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
            {
              error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
                     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
              err = 1;
            }
        }
      /* Classify the outgoing edges of BB.  */
      for (e = bb->succ; e; e = e->succ_next)
        {
          if (e->flags & EDGE_FALLTHRU)
            n_fallthru++;

          if ((e->flags & ~(EDGE_DFS_BACK | EDGE_CAN_FALLTHRU | EDGE_IRREDUCIBLE_LOOP)) == 0)
            n_branch++;

          if (e->flags & EDGE_ABNORMAL_CALL)
            n_call++;

          if (e->flags & EDGE_EH)
            n_eh++;
          else if (e->flags & EDGE_ABNORMAL)
            n_abnormal++;
        }

      /* Check that edge counts are consistent with the kind of insn
         ending BB.  */
      if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
          && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
        {
          error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
          err = 1;
        }
      if (n_branch
          && (GET_CODE (bb->end) != JUMP_INSN
              || (n_branch > 1 && (any_uncondjump_p (bb->end)
                                   || any_condjump_p (bb->end)))))
        {
          error ("Too many outgoing branch edges from bb %i", bb->index);
          err = 1;
        }
      if (n_fallthru && any_uncondjump_p (bb->end))
        {
          error ("Fallthru edge after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_uncondjump_p (bb->end))
        {
          error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_condjump_p (bb->end)
          && JUMP_LABEL (bb->end) != bb->next_bb->head)
        {
          error ("Wrong amount of branch edges after conditional jump %i", bb->index);
          err = 1;
        }
      if (n_call && GET_CODE (bb->end) != CALL_INSN)
        {
          error ("Call edges for non-call insn in bb %i", bb->index);
          err = 1;
        }
      if (n_abnormal
          && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
          && (GET_CODE (bb->end) != JUMP_INSN
              || any_condjump_p (bb->end)
              || any_uncondjump_p (bb->end)))
        {
          error ("Abnormal edges for no purpose in bb %i", bb->index);
          err = 1;
        }

      /* Every insn in the block must have BLOCK_FOR_INSN pointing back
         to BB.  */
      for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
        if (BLOCK_FOR_INSN (x) != bb)
          {
            debug_rtx (x);
            if (! BLOCK_FOR_INSN (x))
              error
                ("insn %d inside basic block %d but block_for_insn is NULL",
                 INSN_UID (x), bb->index);
            else
              error
                ("insn %d inside basic block %d but block_for_insn is %i",
                 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

            err = 1;
          }

      /* OK pointers are correct.  Now check the header of basic
         block.  It ought to contain optional CODE_LABEL followed
         by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
        {
          if (bb->end == x)
            {
              error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                     bb->index);
              err = 1;
            }

          x = NEXT_INSN (x);
        }

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
        {
          error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                 bb->index);
          err = 1;
        }

      if (bb->end == x)
        /* Do checks for empty blocks here.  */
        ;
      else
        for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
          {
            if (NOTE_INSN_BASIC_BLOCK_P (x))
              {
                error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
                       INSN_UID (x), bb->index);
                err = 1;
              }

            if (x == bb->end)
              break;

            if (control_flow_insn_p (x))
              {
                error ("in basic block %d:", bb->index);
                fatal_insn ("flow control insn inside a basic block", x);
              }
          }
    }

  /* Clean up.  */
  free (bb_info);
  return err;
}
1912
1913 /* Verify the CFG and RTL consistency common for both underlying RTL and
1914 cfglayout RTL.
1915
1916 Currently it does following checks:
1917 - all checks of rtl_verify_flow_info_1
1918 - check that all insns are in the basic blocks
1919 (except the switch handling code, barriers and notes)
1920 - check that all returns are followed by barriers
1921 - check that all fallthru edge points to the adjacent blocks. */
static int
rtl_verify_flow_info ()
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  /* Check fallthru consistency of every block.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (!e)
        {
          rtx insn;

          /* Ensure existence of barrier in BB with no fallthru edges.  */
          for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
               insn = NEXT_INSN (insn))
            if (!insn
                || (GET_CODE (insn) == NOTE
                    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
              {
                error ("missing barrier after block %i", bb->index);
                err = 1;
                break;
              }
        }
      else if (e->src != ENTRY_BLOCK_PTR
               && e->dest != EXIT_BLOCK_PTR)
        {
          rtx insn;

          /* A fallthru edge must go to the immediately following block,
             with nothing but notes (and, with CASE_DROPS_THROUGH, jump
             tables) in between.  */
          if (e->src->next_bb != e->dest)
            {
              error
                ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
                 e->src->index, e->dest->index);
              err = 1;
            }
          else
            for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
                 insn = NEXT_INSN (insn))
              if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
                  || INSN_P (insn)
#else
                  || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
#endif
                  )
                {
                  error ("verify_flow_info: Incorrect fallthru %i->%i",
                         e->src->index, e->dest->index);
                  fatal_insn ("wrong insn in the fallthru edge", insn);
                  err = 1;
                }
        }
    }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  /* Walk the whole insn chain checking that every insn lies inside
     some basic block, except for the constructs allowed outside.  */
  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
        {
          bb = NOTE_BASIC_BLOCK (x);

          num_bb_notes++;
          if (bb != last_bb_seen->next_bb)
            internal_error ("basic blocks not laid down consecutively");

          curr_bb = last_bb_seen = bb;
        }

      if (!curr_bb)
        {
          switch (GET_CODE (x))
            {
            case BARRIER:
            case NOTE:
              break;

            case CODE_LABEL:
              /* An addr_vec is placed outside any basic block.  */
              if (NEXT_INSN (x)
                  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
                  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
                      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
                x = NEXT_INSN (x);

              /* But in any case, non-deletable labels can appear anywhere.  */
              break;

            default:
              fatal_insn ("insn outside basic block", x);
            }
        }

      if (INSN_P (x)
          && GET_CODE (x) == JUMP_INSN
          && returnjump_p (x) && ! condjump_p (x)
          && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
        fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == curr_bb->end)
        curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  return err;
}
2041 \f
2042 /* Assume that the preceding pass has possibly eliminated jump instructions
2043 or converted the unconditional jumps. Eliminate the edges from CFG.
2044 Return true if any edges are eliminated. */
2045
bool
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end, note;
  bool purged = false;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (GET_CODE (insn) == INSN
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
          || ((eqnote = find_reg_equal_equiv_note (insn))
              && ! may_trap_p (XEXP (eqnote, 0))))
        remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (e->flags & EDGE_EH)
        {
          /* Keep EH edges only if the ending insn can still throw.  */
          if (can_throw_internal (bb->end))
            continue;
        }
      else if (e->flags & EDGE_ABNORMAL_CALL)
        {
          /* Keep abnormal call edges only for calls that may still
             perform a non-local transfer.  */
          if (GET_CODE (bb->end) == CALL_INSN
              && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
                  || INTVAL (XEXP (note, 0)) >= 0))
            continue;
        }
      else
        continue;

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b,f;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
          && !returnjump_p (insn)
          && !simplejump_p (insn))
        return purged;

      /* Branch probability/prediction notes are defined only for
         condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (note)
            remove_note (insn, note);
          while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
            remove_note (insn, note);
        }

      for (e = bb->succ; e; e = next)
        {
          next = e->succ_next;

          /* Avoid abnormal flags to leak from computed jumps turned
             into simplejumps.  */

          e->flags &= ~EDGE_ABNORMAL;

          /* See if this edge is one we should keep.  */
          if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
            /* A conditional jump can fall through into the next
               block, so we should keep the edge.  */
            continue;
          else if (e->dest != EXIT_BLOCK_PTR
                   && e->dest->head == JUMP_LABEL (insn))
            /* If the destination block is the target of the jump,
               keep the edge.  */
            continue;
          else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
            /* If the destination block is the exit block, and this
               instruction is a return, then keep the edge.  */
            continue;
          else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
            /* Keep the edges that correspond to exceptions thrown by
               this instruction.  */
            continue;

          /* We do not need this edge.  */
          bb->flags |= BB_DIRTY;
          purged = true;
          remove_edge (e);
        }

      if (!bb->succ || !purged)
        return purged;

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
        return purged;

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
        {
          /* A single remaining successor receives everything.  */
          bb->succ->probability = REG_BR_PROB_BASE;
          bb->succ->count = bb->count;
        }
      else
        {
          /* Split the block's count between the branch and fallthru
             edges according to the REG_BR_PROB note, if any.  */
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (!note)
            return purged;

          b = BRANCH_EDGE (bb);
          f = FALLTHRU_EDGE (bb);
          b->probability = INTVAL (XEXP (note, 0));
          f->probability = REG_BR_PROB_BASE - b->probability;
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
        }

      return purged;
    }
  else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
         from non-local gotos and the like.  If there were, we shouldn't
         have created the sibcall in the first place.  Second, there
         should of course never have been a fallthru edge.  */
      if (!bb->succ || bb->succ->succ_next)
        abort ();
      if (bb->succ->flags != EDGE_SIBCALL)
        abort ();

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
        {
          bb->flags |= BB_DIRTY;
          remove_edge (e);
          purged = true;
        }
    }

  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
             bb->index);
  return purged;
}
2225
2226 /* Search all basic blocks for potentially dead edges and purge them. Return
2227 true if some edge has been eliminated. */
2228
2229 bool
2230 purge_all_dead_edges (update_life_p)
2231 int update_life_p;
2232 {
2233 int purged = false;
2234 sbitmap blocks = 0;
2235 basic_block bb;
2236
2237 if (update_life_p)
2238 {
2239 blocks = sbitmap_alloc (last_basic_block);
2240 sbitmap_zero (blocks);
2241 }
2242
2243 FOR_EACH_BB (bb)
2244 {
2245 bool purged_here = purge_dead_edges (bb);
2246
2247 purged |= purged_here;
2248 if (purged_here && update_life_p)
2249 SET_BIT (blocks, bb->index);
2250 }
2251
2252 if (update_life_p && purged)
2253 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2254 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2255 | PROP_KILL_DEAD_CODE);
2256
2257 if (update_life_p)
2258 sbitmap_free (blocks);
2259 return purged;
2260 }
2261
2262 /* Same as split_block but update cfg_layout structures. */
2263 static edge
2264 cfg_layout_split_block (bb, insnp)
2265 basic_block bb;
2266 void *insnp;
2267 {
2268 rtx insn = insnp;
2269
2270 edge fallthru = rtl_split_block (bb, insn);
2271
2272 alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
2273 RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
2274 RBI (fallthru->src)->footer = NULL;
2275 return fallthru;
2276 }
2277

/* Redirect edge E to DEST while in cfg_layout mode, where fallthru
   edges need not connect adjacent blocks.  Returns true when the
   redirection succeeded.  */
static bool
cfg_layout_redirect_edge_and_branch (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;
  bool ret;

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  /* Temporarily hide SRC's successor so the shared redirection code
     cannot see the blocks as adjacent; restored at the end.  */
  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (GET_CODE (src->end) == JUMP_INSN
	  && JUMP_LABEL (src->end) == e->dest->head)
	{
	  if (!redirect_jump (src->end, block_label (dest), 0))
	    abort ();
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (src->succ->succ_next
	  && !src->succ->succ_next->succ_next)
	{
	  /* SRC has exactly two successors here; S is the one other
	     than E.  */
	  edge s = e->succ_next ? e->succ_next : src->succ;
	  if (s->dest == dest
	      && any_condjump_p (src->end)
	      && onlyjump_p (src->end))
	    delete_insn (src->end);
	}
      redirect_edge_succ_nodup (e, dest);

      ret = true;
    }
  else
    ret = rtl_redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      /* NOTE(review): delete_insn updates src->end to the preceding
	 insn, so NEXT_INSN (src->end) below is expected to reach the
	 barrier that followed the deleted jump -- confirm against
	 delete_insn's bb-end maintenance.  */
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;

  return ret;
}
2332
2333 /* Simple wrapper as we always can redirect fallthru edges. */
2334 static basic_block
2335 cfg_layout_redirect_edge_and_branch_force (e, dest)
2336 edge e;
2337 basic_block dest;
2338 {
2339 if (!cfg_layout_redirect_edge_and_branch (e, dest))
2340 abort ();
2341 return NULL;
2342 }
2343
/* Same as flow_delete_block but update cfg_layout structures.

   In cfglayout mode BB may own "header" and "footer" insn sequences that
   live outside the main insn chain.  Splice both back into the chain
   around BB, delete the block, and then move any insns that survive the
   deletion (notes, labels and the like) into the header of the next
   block -- or into the function footer when BB was the last block.  */
static void
cfg_layout_delete_block (bb)
     basic_block bb;
{
  rtx insn, next, prev = PREV_INSN (bb->head), *to, remaints;

  /* Splice BB's header sequence into the chain just before bb->head.  */
  if (RBI (bb)->header)
    {
      next = bb->head;
      if (prev)
	NEXT_INSN (prev) = RBI (bb)->header;
      else
	set_first_insn (RBI (bb)->header);
      PREV_INSN (RBI (bb)->header) = prev;
      insn = RBI (bb)->header;
      /* Walk to the end of the header sequence and link it to NEXT.  */
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  /* Splice BB's footer sequence into the chain just after bb->end.  */
  next = NEXT_INSN (bb->end);
  if (RBI (bb)->footer)
    {
      insn = bb->end;
      NEXT_INSN (insn) = RBI (bb)->footer;
      PREV_INSN (RBI (bb)->footer) = insn;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      if (next)
	PREV_INSN (next) = insn;
      else
	set_last_insn (insn);
    }
  /* Decide where surviving insns will be prepended: the next block's
     header, or the function footer when BB precedes the exit block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &RBI(bb->next_bb)->header;
  else
    to = &cfg_layout_function_footer;
  rtl_delete_block (bb);

  /* Recompute the span of what is left of BB's insns: PREV becomes the
     first surviving insn, NEXT the last.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If anything survived the deletion, unlink it from the main chain
     and prepend it to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
2406
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  rtl_verify_flow_info,			/* verify_flow_info.  */
  rtl_dump_bb,				/* dump_bb.  */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch.  */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force.  */
  rtl_delete_block,			/* delete_block.  */
  rtl_split_block,			/* split_block.  */
  rtl_split_edge			/* split_edge.  */
};
2417
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  rtl_verify_flow_info_1,		/* verify_flow_info.  */
  rtl_dump_bb,				/* dump_bb.  */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch.  */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force.  */
  cfg_layout_delete_block,		/* delete_block.  */
  cfg_layout_split_block,		/* split_block.  */
  NULL					/* split_edge.  */
};