]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgcleanup.c
2001-12-13 Benjamin Kosnik <bkoz@redhat.com>
[thirdparty/gcc.git] / gcc / cfgcleanup.c
CommitLineData
65f34de5 1/* Control flow optimization code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
/* This file contains the optimizer of the control flow.  The main entry
   point is cleanup_cfg.  The following optimizations are performed:

   - Unreachable block removal
   - Edge forwarding (an edge to a forwarder block is forwarded to its
     successor.  Simplification of the branch instruction is performed by
     the underlying infrastructure so the branch can be converted to a
     simplejump or eliminated).
   - Cross jumping (tail merging)
   - Conditional jump-around-simplejump simplification
   - Basic block merging.  */
33
34#include "config.h"
35#include "system.h"
36#include "rtl.h"
37#include "hard-reg-set.h"
38#include "basic-block.h"
39#include "timevar.h"
40#include "output.h"
41#include "insn-config.h"
42#include "flags.h"
43#include "recog.h"
44#include "toplev.h"
45
46#include "obstack.h"
47
/* cleanup_cfg maintains the following flags for each basic block, stored
   in the otherwise unused bb->aux pointer for the duration of the pass.  */
enum bb_flags {
    /* Set if life info needs to be recomputed for given BB.  */
    BB_UPDATE_LIFE = 1,
    /* Set if BB is the forwarder block to avoid too many
       forwarder_block_p calls.  */
    BB_FORWARDER_BLOCK = 2
  };

/* Accessors for the flag bits kept in bb->aux.  The expansions are fully
   parenthesized; the previous definitions lacked the outer parentheses and
   so relied on the relative precedence of the cast and whatever operator
   surrounded the macro use.  */
#define BB_FLAGS(bb) ((enum bb_flags) (bb)->aux)
#define BB_SET_FLAG(bb,flag) \
  ((bb)->aux = (void *) (long) ((enum bb_flags) (bb)->aux | (flag)))
#define BB_CLEAR_FLAG(bb,flag) \
  ((bb)->aux = (void *) (long) ((enum bb_flags) (bb)->aux & ~(flag)))

/* Nonzero if BB has been identified as a forwarder block.  */
#define FORWARDER_BLOCK_P(bb) (BB_FLAGS (bb) & BB_FORWARDER_BLOCK)
64
/* Forward declarations for the file-local cleanup machinery.  PARAMS is
   the pre-ISO prototype wrapper used throughout the tree at this time.  */

static bool try_crossjump_to_edge	PARAMS ((int, edge, edge));
static bool try_crossjump_bb		PARAMS ((int, basic_block));
static bool outgoing_edges_match	PARAMS ((basic_block, basic_block));
static int flow_find_cross_jump		PARAMS ((int, basic_block, basic_block,
						 rtx *, rtx *));

static bool delete_unreachable_blocks	PARAMS ((void));
static bool label_is_jump_target_p	PARAMS ((rtx, rtx));
static bool tail_recursion_label_p	PARAMS ((rtx));
static void merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static void merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							 basic_block));
static bool merge_blocks		PARAMS ((edge,basic_block,basic_block,
						 int));
static bool try_optimize_cfg		PARAMS ((int));
static bool try_simplify_condjump	PARAMS ((basic_block));
static bool try_forward_edges		PARAMS ((int, basic_block));
static void notice_new_block		PARAMS ((basic_block));
static void update_forwarder_flag	PARAMS ((basic_block));
85\f
86/* Set flags for newly created block. */
87
88static void
89notice_new_block (bb)
90 basic_block bb;
91{
92 if (!bb)
93 return;
94 BB_SET_FLAG (bb, BB_UPDATE_LIFE);
95 if (forwarder_block_p (bb))
96 BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
97}
98
99/* Recompute forwarder flag after block has been modified. */
100
101static void
102update_forwarder_flag (bb)
103 basic_block bb;
104{
105 if (forwarder_block_p (bb))
106 BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
107 else
108 BB_CLEAR_FLAG (bb, BB_FORWARDER_BLOCK);
109}
65f34de5 110\f
/* Simplify a conditional jump around an unconditional jump
   (the "jump-around-simplejump" case listed in the file header):
   CBRANCH_BLOCK ends in a conditional branch whose fallthru block is a
   forwarder holding only an unconditional jump.  Invert the condition so
   control falls through instead, then delete the forwarder block.
   Return true if something changed.  */

static bool
try_simplify_condjump (cbranch_block)
     basic_block cbranch_block;
{
  basic_block jump_block, jump_dest_block, cbranch_dest_block;
  edge cbranch_jump_edge, cbranch_fallthru_edge;
  rtx cbranch_insn;

  /* Verify that there are exactly two successors.  */
  if (!cbranch_block->succ
      || !cbranch_block->succ->succ_next
      || cbranch_block->succ->succ_next->succ_next)
    return false;

  /* Verify that we've got a normal conditional branch at the end
     of the block.  */
  cbranch_insn = cbranch_block->end;
  if (!any_condjump_p (cbranch_insn))
    return false;

  cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
  cbranch_jump_edge = BRANCH_EDGE (cbranch_block);

  /* The next block must not have multiple predecessors, must not
     be the last block in the function, and must contain just the
     unconditional jump.  */
  jump_block = cbranch_fallthru_edge->dest;
  if (jump_block->pred->pred_next
      || jump_block->index == n_basic_blocks - 1
      || !FORWARDER_BLOCK_P (jump_block))
    return false;
  jump_dest_block = jump_block->succ->dest;

  /* The conditional branch must target the block after the
     unconditional branch.  */
  cbranch_dest_block = cbranch_jump_edge->dest;

  if (!can_fallthru (jump_block, cbranch_dest_block))
    return false;

  /* Invert the conditional branch.  If the condition cannot be
     reversed, invert_jump fails and we leave everything untouched.  */
  if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 0))
    return false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
	     INSN_UID (cbranch_insn), INSN_UID (jump_block->end));

  /* Success.  Update the CFG to match.  Note that after this point
     the edge variable names appear backwards; the redirection is done
     this way to preserve edge profile data.  */
  cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
						cbranch_dest_block);
  cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
						    jump_dest_block);
  /* The (inverted) branch edge is the one that now falls through.  */
  cbranch_jump_edge->flags |= EDGE_FALLTHRU;
  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  flow_delete_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);

  return true;
}
178\f
/* Attempt to forward edges leaving basic block B, skipping over chains of
   forwarder blocks so each edge points at the final real destination.
   MODE is a bitmask of CLEANUP_* flags.  Profile counts and frequencies
   of the bypassed forwarders are adjusted to stay consistent.
   Return true if successful.  */

static bool
try_forward_edges (mode, b)
     basic_block b;
     int mode;
{
  bool changed = false;
  edge e, next;

  for (e = b->succ; e ; e = next)
    {
      basic_block target, first;
      int counter;

      /* Fetch the next edge now; E may be removed by the redirection.  */
      next = e->succ_next;

      /* Skip complex edges because we don't know how to update them.

	 Still handle fallthru edges, as we can succeed to forward fallthru
	 edge to the same place as the branch edge of conditional branch
	 and turn conditional branch to an unconditional branch.  */
      if (e->flags & EDGE_COMPLEX)
	continue;

      target = first = e->dest;
      counter = 0;

      /* Look for the real destination of the jump.
	 Avoid infinite loop in the infinite empty loop by counting
	 up to n_basic_blocks.  */
      while (FORWARDER_BLOCK_P (target)
	     && target->succ->dest != EXIT_BLOCK_PTR
	     && counter < n_basic_blocks)
	{
	  /* Bypass trivial infinite loops.  */
	  if (target == target->succ->dest)
	    counter = n_basic_blocks;

	  /* Avoid killing of loop pre-headers, as it is the place loop
	     optimizer wants to hoist code to.

	     For fallthru forwarders, the LOOP_BEG note must appear between
	     the header of block and CODE_LABEL of the loop, for non forwarders
	     it must appear before the JUMP_INSN.  */
	  if (mode & CLEANUP_PRE_LOOP)
	    {
	      rtx insn = (target->succ->flags & EDGE_FALLTHRU
			  ? target->head : prev_nonnote_insn (target->end));

	      if (GET_CODE (insn) != NOTE)
		insn = NEXT_INSN (insn);

	      /* Scan forward for a LOOP_BEG note before any label or
		 real insn; finding one means TARGET is a pre-header.  */
	      for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == NOTE
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
		  break;

	      if (GET_CODE (insn) == NOTE)
		break;
	    }
	  target = target->succ->dest, counter++;
	}

      if (counter >= n_basic_blocks)
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
		     target->index);
	}
      else if (target == first)
	; /* We didn't do anything.  */
      else
	{
	  /* Save the values now, as the edge may get removed.  */
	  gcov_type edge_count = e->count;
	  int edge_probability = e->probability;

	  if (redirect_edge_and_branch (e, target))
	    {
	      /* We successfully forwarded the edge.  Now update profile
		 data: for each edge we traversed in the chain, remove
		 the original edge's execution count.  */
	      int edge_frequency = ((edge_probability * b->frequency
				     + REG_BR_PROB_BASE / 2)
				    / REG_BR_PROB_BASE);

	      /* The redirection may have turned B itself into a
		 forwarder; refresh the cached flag.  */
	      if (!FORWARDER_BLOCK_P (b) && forwarder_block_p (b))
		BB_SET_FLAG (b, BB_FORWARDER_BLOCK);
	      BB_SET_FLAG (b, BB_UPDATE_LIFE);

	      do
		{
		  first->count -= edge_count;
		  first->succ->count -= edge_count;
		  first->frequency -= edge_frequency;
		  first = first->succ->dest;
		}
	      while (first != target);

	      changed = true;
	    }
	  else
	    {
	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
			 b->index, e->dest->index, target->index);
	    }
	}
    }

  return changed;
}
294\f
460ee42f 295/* Return true if LABEL is a target of JUMP_INSN. This applies only
296 to non-complex jumps. That is, direct unconditional, conditional,
297 and tablejumps, but not computed jumps or returns. It also does
298 not apply to the fallthru case of a conditional jump. */
299
300static bool
301label_is_jump_target_p (label, jump_insn)
302 rtx label, jump_insn;
303{
304 rtx tmp = JUMP_LABEL (jump_insn);
305
306 if (label == tmp)
307 return true;
308
309 if (tmp != NULL_RTX
310 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
311 && GET_CODE (tmp) == JUMP_INSN
312 && (tmp = PATTERN (tmp),
313 GET_CODE (tmp) == ADDR_VEC
314 || GET_CODE (tmp) == ADDR_DIFF_VEC))
315 {
316 rtvec vec = XVEC (tmp, GET_CODE (tmp) == ADDR_DIFF_VEC);
317 int i, veclen = GET_NUM_ELEM (vec);
318
319 for (i = 0; i < veclen; ++i)
320 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
321 return true;
322 }
323
324 return false;
325}
326
e76f35e8 327/* Return true if LABEL is used for tail recursion. */
328
329static bool
65f34de5 330tail_recursion_label_p (label)
331 rtx label;
332{
333 rtx x;
334
335 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
336 if (label == XEXP (x, 0))
e76f35e8 337 return true;
65f34de5 338
e76f35e8 339 return false;
65f34de5 340}
341
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  Physically moves A's insns
   in front of B, swaps the two blocks' slots in the BB array, and merges.  */

static void
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx barrier;
  int index;

  /* The barrier after A's jump to B becomes redundant once the blocks
     are contiguous; it must exist, or the insn chain is corrupt.  */
  barrier = next_nonnote_insn (a->end);
  if (GET_CODE (barrier) != BARRIER)
    abort ();
  delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  if (squeeze_notes (&a->head, &a->end))
    abort ();

  /* Scramble the insn chain.  */
  if (a->end != PREV_INSN (b->head))
    reorder_insns_nobb (a->head, a->end, PREV_INSN (b->head));
  BB_SET_FLAG (a, BB_UPDATE_LIFE);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);
}
391
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  Any jump table attached
   to B travels with it.  */

static void
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx barrier, real_b_end;

  real_b_end = b->end;
  barrier = NEXT_INSN (b->end);

  /* Recognize a jump table following block B.  */
  if (barrier
      && GET_CODE (barrier) == CODE_LABEL
      && NEXT_INSN (barrier)
      && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
    {
      /* Temporarily add the table jump insn to b, so that it will also
	 be moved to the correct location.  */
      b->end = NEXT_INSN (barrier);
      barrier = NEXT_INSN (b->end);
    }

  /* There had better have been a barrier there.  Delete it.  */
  if (barrier && GET_CODE (barrier) == BARRIER)
    delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  if (squeeze_notes (&b->head, &b->end))
    abort ();

  /* Scramble the insn chain.  */
  reorder_insns_nobb (b->head, b->end, a->end);

  /* Restore the real end of b, undoing the temporary extension that
     dragged the jump table along.  */
  b->end = real_b_end;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);
  BB_SET_FLAG (a, BB_UPDATE_LIFE);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }
}
449
/* Attempt to merge basic blocks B and C connected by edge E, moving code
   if necessary; C is B's successor along E.  MODE is a bitmask of
   CLEANUP_* flags controlling which transformations are permitted.
   Return true iff the attempt succeeded.  */

static bool
merge_blocks (e, b, c, mode)
     edge e;
     basic_block b, c;
     int mode;
{
  /* If C has a tail recursion label, do not merge.  There is no
     edge recorded from the call_placeholder back to this label, as
     that would make optimize_sibling_and_tail_recursive_calls more
     complex for no gain.  */
  if ((mode & CLEANUP_PRE_SIBCALL)
      && GET_CODE (c->head) == CODE_LABEL
      && tail_recursion_label_p (c->head))
    return false;

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      /* We need to update liveness in case C already has broken liveness
	 or B ends by conditional jump to next instructions that will be
	 removed.  */
      if ((BB_FLAGS (c) & BB_UPDATE_LIFE)
	  || GET_CODE (b->end) == JUMP_INSN)
	BB_SET_FLAG (b, BB_UPDATE_LIFE);
      merge_blocks_nomove (b, c);
      update_forwarder_flag (b);

      if (rtl_dump_file)
	{
	  fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
		   b->index, c->index);
	}

      return true;
    }
  /* Otherwise we will need to move code around.  Do that only if expensive
     transformations are allowed.  */
  else if (mode & CLEANUP_EXPENSIVE)
    {
      edge tmp_edge, b_fallthru_edge;
      bool c_has_outgoing_fallthru;
      bool b_has_incoming_fallthru;

      /* Avoid overactive code motion, as the forwarder blocks should be
	 eliminated by edge redirection instead.  One exception might have
	 been if B is a forwarder block and C has no fallthru edge, but
	 that should be cleaned up by bb-reorder instead.  */
      if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
	return false;

      /* We must make sure to not munge nesting of lexical blocks,
	 and loop notes.  This is done by squeezing out all the notes
	 and leaving them there to lie.  Not ideal, but functional.  */

      for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;
      c_has_outgoing_fallthru = (tmp_edge != NULL);

      for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;
      b_has_incoming_fallthru = (tmp_edge != NULL);
      b_fallthru_edge = tmp_edge;

      /* Otherwise, we're going to try to move C after B.  If C does
	 not have an outgoing fallthru, then it can be moved
	 immediately after B without introducing or modifying jumps.  */
      if (! c_has_outgoing_fallthru)
	{
	  merge_blocks_move_successor_nojumps (b, c);
	  return true;
	}

      /* If B does not have an incoming fallthru, then it can be moved
	 immediately before C without introducing or modifying jumps.
	 C cannot be the first block, so we do not have to worry about
	 accessing a non-existent block.  */

      if (b_has_incoming_fallthru)
	{
	  basic_block bb;
	  /* Cannot force a jump out of the entry block.  */
	  if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
	    return false;
	  /* Convert the fallthru into an explicit jump so B may move;
	     force_nonfallthru may create a new block that needs its
	     flags initialized.  */
	  bb = force_nonfallthru (b_fallthru_edge);
	  if (bb)
	    notice_new_block (bb);
	  else
	    BB_SET_FLAG (b_fallthru_edge->src, BB_UPDATE_LIFE);
	}
      merge_blocks_move_predecessor_nojumps (b, c);
      return true;
    }
  return false;
}
548\f
/* Look through the insns at the end of BB1 and BB2 and find the longest
   sequence that are equivalent.  Store the first insns for that sequence
   in *F1 and *F2 and return the sequence length (number of matched
   non-USE/CLOBBER insns).  MODE is a bitmask of CLEANUP_* flags; it is
   only consulted under STACK_REGS.

   To simplify callers of this function, if the blocks match exactly,
   store the head of the blocks in *F1 and *F2.  */

static int
flow_find_cross_jump (mode, bb1, bb2, f1, f2)
     int mode ATTRIBUTE_UNUSED;
     basic_block bb1, bb2;
     rtx *f1, *f2;
{
  rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
  int ninsns = 0;

  /* Skip simple jumps at the end of the blocks.  Complex jumps still
     need to be compared for equivalence, which we'll do below.  */

  i1 = bb1->end;
  if (onlyjump_p (i1)
      || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
    i1 = PREV_INSN (i1);
  i2 = bb2->end;
  if (onlyjump_p (i2)
      || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
    i2 = PREV_INSN (i2);

  /* LAST1/LAST2 track the earliest matched pair; AFTERLAST1/AFTERLAST2
     track the pair matched just after them, for the cc0 backtrack below.  */
  last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
  while (true)
    {
      /* Ignore notes.  */
      while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
	i1 = PREV_INSN (i1);
      while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
	i2 = PREV_INSN (i2);

      if (i1 == bb1->head || i2 == bb2->head)
	break;

      /* Verify that I1 and I2 are equivalent.  */

      if (GET_CODE (i1) != GET_CODE (i2))
	break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);

      /* If this is a CALL_INSN, compare register usage information.
	 If we don't check this on stack register machines, the two
	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
	 numbers of stack registers in the same basic block.
	 If we don't check this on machines with delay slots, a delay slot may
	 be filled that clobbers a parameter expected by the subroutine.

	 ??? We take the simple route for now and assume that if they're
	 equal, they were constructed identically.  */

      if (GET_CODE (i1) == CALL_INSN
	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
			    CALL_INSN_FUNCTION_USAGE (i2)))
	break;

#ifdef STACK_REGS
      /* If cross_jump_death_matters is not 0, the insn's mode
	 indicates whether or not the insn contains any stack-like
	 regs.  */

      if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
	{
	  /* If register stack conversion has already been done, then
	     death notes must also be compared before it is certain that
	     the two instruction streams match.  */

	  rtx note;
	  HARD_REG_SET i1_regset, i2_regset;

	  CLEAR_HARD_REG_SET (i1_regset);
	  CLEAR_HARD_REG_SET (i2_regset);

	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& STACK_REG_P (XEXP (note, 0)))
	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

	  /* GO_IF_HARD_REG_EQUAL jumps to DONE when the death sets
	     match; otherwise we fall through to the break.  */
	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

	  break;

	done:
	  ;
	}
#endif

      if (GET_CODE (p1) != GET_CODE (p2))
	break;

      if (! rtx_renumbered_equal_p (p1, p2))
	{
	  /* The following code helps take care of G++ cleanups.  */
	  rtx equiv1 = find_reg_equal_equiv_note (i1);
	  rtx equiv2 = find_reg_equal_equiv_note (i2);

	  if (equiv1 && equiv2
	      /* If the equivalences are not to a constant, they may
		 reference pseudos that no longer exist, so we can't
		 use them.  */
	      && CONSTANT_P (XEXP (equiv1, 0))
	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      rtx s1 = single_set (i1);
	      rtx s2 = single_set (i2);
	      if (s1 != 0 && s2 != 0
		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
		{
		  /* Try rewriting both sources to the shared constant;
		     keep the change only if the patterns then match.  */
		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
		  if (! rtx_renumbered_equal_p (p1, p2))
		    cancel_changes (0);
		  else if (apply_change_group ())
		    goto win;
		}
	    }
	  break;
	}

    win:
      /* Don't begin a cross-jump with a USE or CLOBBER insn.  */
      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
	{
	  /* If the merged insns have different REG_EQUAL notes, then
	     remove them.  */
	  rtx equiv1 = find_reg_equal_equiv_note (i1);
	  rtx equiv2 = find_reg_equal_equiv_note (i2);

	  if (equiv1 && !equiv2)
	    remove_note (i1, equiv1);
	  else if (!equiv1 && equiv2)
	    remove_note (i2, equiv2);
	  else if (equiv1 && equiv2
		   && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	    {
	      remove_note (i1, equiv1);
	      remove_note (i2, equiv2);
	    }

	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2;
	  ninsns++;
	}
      i1 = PREV_INSN (i1);
      i2 = PREV_INSN (i2);
    }

#ifdef HAVE_cc0
  if (ninsns)
    {
      /* Don't allow the insn after a compare to be shared by
	 cross-jumping unless the compare is also shared.  */
      if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
	last1 = afterlast1, last2 = afterlast2, ninsns--;
    }
#endif

  /* Include preceding notes and labels in the cross-jump.  One,
     this may bring us to the head of the blocks as requested above.
     Two, it keeps line number notes as matched as may be.  */
  if (ninsns)
    {
      while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
	last1 = PREV_INSN (last1);
      if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
	last1 = PREV_INSN (last1);
      while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
	last2 = PREV_INSN (last2);
      if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
	last2 = PREV_INSN (last2);

      *f1 = last1;
      *f2 = last2;
    }

  return ninsns;
}
738
739/* Return true iff outgoing edges of BB1 and BB2 match, together with
740 the branch instruction. This means that if we commonize the control
741 flow before end of the basic block, the semantic remains unchanged.
742
743 We may assume that there exists one edge with a common destination. */
744
745static bool
746outgoing_edges_match (bb1, bb2)
747 basic_block bb1;
748 basic_block bb2;
749{
750 /* If BB1 has only one successor, we must be looking at an unconditional
751 jump. Which, by the assumption above, means that we only need to check
752 that BB2 has one successor. */
753 if (bb1->succ && !bb1->succ->succ_next)
754 return (bb2->succ && !bb2->succ->succ_next);
755
756 /* Match conditional jumps - this may get tricky when fallthru and branch
757 edges are crossed. */
758 if (bb1->succ
759 && bb1->succ->succ_next
760 && !bb1->succ->succ_next->succ_next
761 && any_condjump_p (bb1->end))
762 {
763 edge b1, f1, b2, f2;
764 bool reverse, match;
765 rtx set1, set2, cond1, cond2;
766 enum rtx_code code1, code2;
767
768 if (!bb2->succ
769 || !bb2->succ->succ_next
770 || bb1->succ->succ_next->succ_next
771 || !any_condjump_p (bb2->end))
772 return false;
773
774 b1 = BRANCH_EDGE (bb1);
775 b2 = BRANCH_EDGE (bb2);
776 f1 = FALLTHRU_EDGE (bb1);
777 f2 = FALLTHRU_EDGE (bb2);
778
779 /* Get around possible forwarders on fallthru edges. Other cases
780 should be optimized out already. */
33dbe4d1 781 if (FORWARDER_BLOCK_P (f1->dest))
65f34de5 782 f1 = f1->dest->succ;
33dbe4d1 783 if (FORWARDER_BLOCK_P (f2->dest))
65f34de5 784 f2 = f2->dest->succ;
785
786 /* To simplify use of this function, return false if there are
787 unneeded forwarder blocks. These will get eliminated later
788 during cleanup_cfg. */
33dbe4d1 789 if (FORWARDER_BLOCK_P (f1->dest)
790 || FORWARDER_BLOCK_P (f2->dest)
791 || FORWARDER_BLOCK_P (b1->dest)
792 || FORWARDER_BLOCK_P (b2->dest))
65f34de5 793 return false;
794
795 if (f1->dest == f2->dest && b1->dest == b2->dest)
796 reverse = false;
797 else if (f1->dest == b2->dest && b1->dest == f2->dest)
798 reverse = true;
799 else
800 return false;
801
802 set1 = pc_set (bb1->end);
803 set2 = pc_set (bb2->end);
804 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
805 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
806 reverse = !reverse;
807
808 cond1 = XEXP (SET_SRC (set1), 0);
809 cond2 = XEXP (SET_SRC (set2), 0);
810 code1 = GET_CODE (cond1);
811 if (reverse)
812 code2 = reversed_comparison_code (cond2, bb2->end);
813 else
814 code2 = GET_CODE (cond2);
815 if (code2 == UNKNOWN)
816 return false;
817
818 /* Verify codes and operands match. */
819 match = ((code1 == code2
820 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
821 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
822 || (code1 == swap_condition (code2)
823 && rtx_renumbered_equal_p (XEXP (cond1, 1),
824 XEXP (cond2, 0))
825 && rtx_renumbered_equal_p (XEXP (cond1, 0),
826 XEXP (cond2, 1))));
827
828 /* If we return true, we will join the blocks. Which means that
829 we will only have one branch prediction bit to work with. Thus
830 we require the existing branches to have probabilities that are
831 roughly similar. */
832 /* ??? We should use bb->frequency to allow merging in infrequently
833 executed blocks, but at the moment it is not available when
834 cleanup_cfg is run. */
835 if (match && !optimize_size)
836 {
837 rtx note1, note2;
838 int prob1, prob2;
839 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
840 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
841
842 if (note1 && note2)
843 {
844 prob1 = INTVAL (XEXP (note1, 0));
845 prob2 = INTVAL (XEXP (note2, 0));
846 if (reverse)
847 prob2 = REG_BR_PROB_BASE - prob2;
848
849 /* Fail if the difference in probabilities is
850 greater than 5%. */
851 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
852 return false;
853 }
854 else if (note1 || note2)
855 return false;
856 }
857
858 if (rtl_dump_file && match)
859 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
860 bb1->index, bb2->index);
861
862 return match;
863 }
864
865 /* ??? We can handle computed jumps too. This may be important for
866 inlined functions containing switch statements. Also jumps w/o
867 fallthru edges can be handled by simply matching whole insn. */
868 return false;
869}
870
/* E1 and E2 are edges with the same destination block.  Search their
   predecessors for common code.  If found, redirect control flow from
   (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC.
   MODE is a bitmask of CLEANUP_* flags, passed on to
   flow_find_cross_jump.  Return true if a cross jump was performed.  */

static bool
try_crossjump_to_edge (mode, e1, e2)
     int mode;
     edge e1, e2;
{
  int nmatch;
  basic_block src1 = e1->src, src2 = e2->src;
  basic_block redirect_to;
  rtx newpos1, newpos2;
  edge s;
  rtx last;
  rtx label;
  rtx note;

  /* Search backward through forwarder blocks.  We don't need to worry
     about multiple entry or chained forwarders, as they will be optimized
     away.  We do this to look past the unconditional jump following a
     conditional jump that is required due to the current CFG shape.  */
  if (src1->pred
      && !src1->pred->pred_next
      && FORWARDER_BLOCK_P (src1))
    {
      e1 = src1->pred;
      src1 = e1->src;
    }
  if (src2->pred
      && !src2->pred->pred_next
      && FORWARDER_BLOCK_P (src2))
    {
      e2 = src2->pred;
      src2 = e2->src;
    }

  /* Nothing to do if we reach ENTRY, or a common source block.  */
  if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
    return false;
  if (src1 == src2)
    return false;

  /* Seeing more than 1 forwarder blocks would confuse us later...  */
  if (FORWARDER_BLOCK_P (e1->dest)
      && FORWARDER_BLOCK_P (e1->dest->succ->dest))
    return false;
  if (FORWARDER_BLOCK_P (e2->dest)
      && FORWARDER_BLOCK_P (e2->dest->succ->dest))
    return false;

  /* Likewise with dead code (possibly newly created by the other optimizations
     of cfg_cleanup).  */
  if (!src1->pred || !src2->pred)
    return false;

  /* Likewise with complex edges.
     ??? We should be able to handle most complex edges later with some
     care.  */
  if (e1->flags & EDGE_COMPLEX)
    return false;

  /* Look for the common insn sequence, part the first ...  */
  if (!outgoing_edges_match (src1, src2))
    return false;

  /* ... and part the second.  */
  nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
  if (!nmatch)
    return false;

  /* Avoid splitting if possible.  If the whole of SRC2 matched, jump
     straight to it; otherwise split SRC2 at the match start and jump
     to the new tail block.  */
  if (newpos2 == src2->head)
    redirect_to = src2;
  else
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
		 src2->index, nmatch);
      redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "Cross jumping from bb %i to bb %i; %i common insns\n",
	     src1->index, src2->index, nmatch);

  /* SRC1's executions now flow through REDIRECT_TO as well.  */
  redirect_to->count += src1->count;
  redirect_to->frequency += src1->frequency;

  /* Recompute the frequencies and counts of outgoing edges.  */
  for (s = redirect_to->succ; s; s = s->succ_next)
    {
      edge s2;
      basic_block d = s->dest;

      if (FORWARDER_BLOCK_P (d))
	d = d->succ->dest;
      /* Find SRC1's outgoing edge S2 reaching the same real
	 destination; outgoing_edges_match guarantees it exists.  */
      for (s2 = src1->succ; ; s2 = s2->succ_next)
	{
	  basic_block d2 = s2->dest;
	  if (FORWARDER_BLOCK_P (d2))
	    d2 = d2->succ->dest;
	  if (d == d2)
	    break;
	}
      s->count += s2->count;

      /* Take care to update possible forwarder blocks.  We verified
	 that there is no more than one in the chain, so we can't run
	 into infinite loop.  */
      if (FORWARDER_BLOCK_P (s->dest))
	{
	  s->dest->succ->count += s2->count;
	  s->dest->count += s2->count;
	  s->dest->frequency += EDGE_FREQUENCY (s);
	}
      if (FORWARDER_BLOCK_P (s2->dest))
	{
	  s2->dest->succ->count -= s2->count;
	  s2->dest->count -= s2->count;
	  s2->dest->frequency -= EDGE_FREQUENCY (s);
	}
      /* Combine the two edges' probabilities, weighted by block
	 frequency; fall back to a plain average when both are zero.  */
      if (!redirect_to->frequency && !src1->frequency)
	s->probability = (s->probability + s2->probability) / 2;
      else
	s->probability =
	  ((s->probability * redirect_to->frequency +
	    s2->probability * src1->frequency)
	   / (redirect_to->frequency + src1->frequency));
    }

  note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
  if (note)
    XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);

  /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1.  */

  /* Skip possible basic block header.  */
  if (GET_CODE (newpos1) == CODE_LABEL)
    newpos1 = NEXT_INSN (newpos1);
  if (GET_CODE (newpos1) == NOTE)
    newpos1 = NEXT_INSN (newpos1);
  last = src1->end;

  /* Emit the jump insn.  */
  label = block_label (redirect_to);
  emit_jump_insn_after (gen_jump (label), src1->end);
  JUMP_LABEL (src1->end) = label;
  LABEL_NUSES (label)++;

  /* Delete the now unreachable instructions.  */
  delete_insn_chain (newpos1, last);

  /* Make sure there is a barrier after the new jump.  */
  last = next_nonnote_insn (src1->end);
  if (!last || GET_CODE (last) != BARRIER)
    emit_barrier_after (src1->end);

  /* Update CFG.  */
  while (src1->succ)
    remove_edge (src1->succ);
  make_single_succ_edge (src1, redirect_to, 0);

  BB_SET_FLAG (src1, BB_UPDATE_LIFE);
  update_forwarder_flag (src1);

  return true;
}
1040
/* Search the predecessors of BB for common insn sequences.  When found,
   share code between them by redirecting control flow.  Return true if
   any changes made.

   MODE is the set of CLEANUP_* flags, passed through unchanged to
   try_crossjump_to_edge.  */

static bool
try_crossjump_bb (mode, bb)
     int mode;
     basic_block bb;
{
  edge e, e2, nexte2, nexte, fallthru;
  bool changed;

  /* Nothing to do if there is not at least two incoming edges.  */
  if (!bb->pred || !bb->pred->pred_next)
    return false;

  /* It is always cheapest to redirect a block that ends in a branch to
     a block that falls through into BB, as that adds no branches to the
     program.  We'll try that combination first.  */
  for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
    if (fallthru->flags & EDGE_FALLTHRU)
      break;

  changed = false;
  for (e = bb->pred; e; e = nexte)
    {
      /* Save the successor up front: a successful cross-jump below may
	 remove or redirect E, after which e->pred_next is unusable.  */
      nexte = e->pred_next;

      /* Elide complex edges now, as neither try_crossjump_to_edge
	 nor outgoing_edges_match can handle them.  */
      if (e->flags & EDGE_COMPLEX)
	continue;

      /* As noted above, first try with the fallthru predecessor.  */
      if (fallthru)
	{
	  /* Don't combine the fallthru edge into anything else.
	     If there is a match, we'll do it the other way around.  */
	  if (e == fallthru)
	    continue;

	  if (try_crossjump_to_edge (mode, e, fallthru))
	    {
	      changed = true;
	      /* The predecessor list was rewritten by the cross-jump;
		 restart the scan from the head of the list.  */
	      nexte = bb->pred;
	      continue;
	    }
	}

      /* Non-obvious work limiting check: Recognize that we're going
	 to call try_crossjump_bb on every basic block.  So if we have
	 two blocks with lots of outgoing edges (a switch) and they
	 share lots of common destinations, then we would do the
	 cross-jump check once for each common destination.

	 Now, if the blocks actually are cross-jump candidates, then
	 all of their destinations will be shared.  Which means that
	 we only need check them for cross-jump candidacy once.  We
	 can eliminate redundant checks of crossjump(A,B) by arbitrarily
	 choosing to do the check from the block for which the edge
	 in question is the first successor of A.  */
      if (e->src->succ != e)
	continue;

      for (e2 = bb->pred; e2; e2 = nexte2)
	{
	  /* Same caution as for NEXTE above: E2 may be removed.  */
	  nexte2 = e2->pred_next;

	  if (e2 == e)
	    continue;

	  /* We've already checked the fallthru edge above.  */
	  if (e2 == fallthru)
	    continue;

	  /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
	     can handle complex edges.  */
	  if (e2->flags & EDGE_COMPLEX)
	    continue;

	  /* The "first successor" check above only prevents multiple
	     checks of crossjump(A,B).  In order to prevent redundant
	     checks of crossjump(B,A), require that A be the block
	     with the lowest index.  */
	  if (e->src->index > e2->src->index)
	    continue;

	  if (try_crossjump_to_edge (mode, e, e2))
	    {
	      changed = true;
	      /* Restart the outer scan; the inner loop's NEXTE2 is
		 stale for the same reason as above.  */
	      nexte = bb->pred;
	      break;
	    }
	}
    }

  return changed;
}
1139
/* Do simple CFG optimizations - basic block merging, simplifying of jump
   instructions etc.  Return nonzero if changes were made.

   MODE is a bitmask of CLEANUP_* flags selecting which transformations
   to attempt.  Iterates to a fixed point: each pass over the blocks may
   enable further simplifications, so we loop until nothing changes.  */

static bool
try_optimize_cfg (mode)
     int mode;
{
  int i;
  bool changed_overall = false;	/* Any change across all iterations.  */
  bool changed;			/* Any change in the current iteration.  */
  int iterations = 0;
  sbitmap blocks;

  /* Fake exit edges let cross-jumping treat noreturn calls uniformly.  */
  if (mode & CLEANUP_CROSSJUMP)
    add_noreturn_fake_exit_edges ();

  /* Prime the per-block forwarder flags consulted by the passes below.  */
  for (i = 0; i < n_basic_blocks; i++)
    update_forwarder_flag (BASIC_BLOCK (i));

  /* Attempt to merge blocks as made possible by edge removal.  If a block
     has only one successor, and the successor has only one predecessor,
     they may be combined.  */

  do
    {
      changed = false;
      iterations++;

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
		 iterations);

      for (i = 0; i < n_basic_blocks;)
	{
	  basic_block c, b = BASIC_BLOCK (i);
	  edge s;
	  bool changed_here = false;

	  /* Delete trivially dead basic blocks (no predecessors).
	     Loop, since deleting B renumbers blocks and the block now
	     occupying this slot may be dead as well.  NOTE(review):
	     this indexes B->index - 1, presumably relying on block 0
	     always having a predecessor (the entry block) — confirm.  */
	  while (b->pred == NULL)
	    {
	      c = BASIC_BLOCK (b->index - 1);
	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
	      flow_delete_block (b);
	      changed = true;
	      b = c;
	    }

	  /* Remove code labels no longer used.  Don't do this before
	     CALL_PLACEHOLDER is removed, as some branches may be hidden
	     within.  */
	  if (b->pred->pred_next == NULL
	      && (b->pred->flags & EDGE_FALLTHRU)
	      && !(b->pred->flags & EDGE_COMPLEX)
	      && GET_CODE (b->head) == CODE_LABEL
	      && (!(mode & CLEANUP_PRE_SIBCALL)
		  || !tail_recursion_label_p (b->head))
	      /* If the previous block ends with a branch to this block,
		 we can't delete the label.  Normally this is a condjump
		 that is yet to be simplified, but if CASE_DROPS_THRU,
		 this can be a tablejump with some element going to the
		 same place as the default (fallthru).  */
	      && (b->pred->src == ENTRY_BLOCK_PTR
		  || GET_CODE (b->pred->src->end) != JUMP_INSN
		  || ! label_is_jump_target_p (b->head, b->pred->src->end)))
	    {
	      rtx label = b->head;
	      b->head = NEXT_INSN (b->head);
	      delete_insn_chain (label, label);
	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Deleted label in block %i.\n",
			 b->index);
	    }

	  /* If we fall through an empty block, we can remove it.  */
	  if (b->pred->pred_next == NULL
	      && (b->pred->flags & EDGE_FALLTHRU)
	      && GET_CODE (b->head) != CODE_LABEL
	      && FORWARDER_BLOCK_P (b)
	      /* Note that forwarder_block_p true ensures that there
		 is a successor for this block.  */
	      && (b->succ->flags & EDGE_FALLTHRU)
	      && n_basic_blocks > 1)
	    {
	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
			 b->index);
	      /* Step back one block (or forward, when deleting block 0)
		 so the loop revisits the neighborhood after deletion.  */
	      c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
	      redirect_edge_succ_nodup (b->pred, b->succ->dest);
	      flow_delete_block (b);
	      changed = true;
	      b = c;
	    }

	  /* Merge blocks.  Loop because chains of blocks might be
	     combineable.  */
	  while ((s = b->succ) != NULL
		 && s->succ_next == NULL
		 && !(s->flags & EDGE_COMPLEX)
		 && (c = s->dest) != EXIT_BLOCK_PTR
		 && c->pred->pred_next == NULL
		 /* If the jump insn has side effects,
		    we can't kill the edge.  */
		 && (GET_CODE (b->end) != JUMP_INSN
		     || onlyjump_p (b->end))
		 && merge_blocks (s, b, c, mode))
	    changed_here = true;

	  /* Simplify branch over branch.  */
	  if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
	    {
	      /* Mark B so the final life-info pass re-analyzes it.  */
	      BB_SET_FLAG (b, BB_UPDATE_LIFE);
	      changed_here = true;
	    }

	  /* If B has a single outgoing edge, but uses a non-trivial jump
	     instruction without side-effects, we can either delete the
	     jump entirely, or replace it with a simple unconditional jump.
	     Use redirect_edge_and_branch to do the dirty work.  */
	  if (b->succ
	      && ! b->succ->succ_next
	      && b->succ->dest != EXIT_BLOCK_PTR
	      && onlyjump_p (b->end)
	      && redirect_edge_and_branch (b->succ, b->succ->dest))
	    {
	      BB_SET_FLAG (b, BB_UPDATE_LIFE);
	      update_forwarder_flag (b);
	      changed_here = true;
	    }

	  /* Simplify branch to branch.  */
	  if (try_forward_edges (mode, b))
	    changed_here = true;

	  /* Look for shared code between blocks.  */
	  if ((mode & CLEANUP_CROSSJUMP)
	      && try_crossjump_bb (mode, b))
	    changed_here = true;

	  /* Don't get confused by the index shift caused by deleting
	     blocks.  Advance only when B is stable; otherwise rescan
	     the (possibly renumbered) slot B now occupies.  */
	  if (!changed_here)
	    i = b->index + 1;
	  else
	    changed = true;
	}

      /* Also try cross-jumping among the predecessors of the exit
	 block; they are not visited by the loop above.  */
      if ((mode & CLEANUP_CROSSJUMP)
	  && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
	changed = true;

#ifdef ENABLE_CHECKING
      if (changed)
	verify_flow_info ();
#endif

      changed_overall |= changed;
    }
  while (changed);

  if (mode & CLEANUP_CROSSJUMP)
    remove_fake_edges ();

  /* Recompute global life information, restricted to the blocks that
     were flagged BB_UPDATE_LIFE by the transformations above.  */
  if ((mode & CLEANUP_UPDATE_LIFE) && changed_overall)
    {
      bool found = 0;
      blocks = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (blocks);
      for (i = 0; i < n_basic_blocks; i++)
	if (BB_FLAGS (BASIC_BLOCK (i)) & BB_UPDATE_LIFE)
	  {
	    found = 1;
	    SET_BIT (blocks, i);
	  }
      if (found)
	update_life_info (blocks, UPDATE_LIFE_GLOBAL,
			  PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
			  | PROP_KILL_DEAD_CODE);
      sbitmap_free (blocks);
    }

  /* Clear the per-block flag storage (aux) borrowed by this pass.  */
  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = NULL;

  return changed_overall;
}
1326\f
1e625a2e 1327/* Delete all unreachable basic blocks. */
e76f35e8 1328
65f34de5 1329static bool
1330delete_unreachable_blocks ()
1331{
1332 int i;
1333 bool changed = false;
1334
1335 find_unreachable_blocks ();
1336
1337 /* Delete all unreachable basic blocks. Count down so that we
1338 don't interfere with the block renumbering that happens in
1339 flow_delete_block. */
1340
1341 for (i = n_basic_blocks - 1; i >= 0; --i)
1342 {
1343 basic_block b = BASIC_BLOCK (i);
1344
1345 if (!(b->flags & BB_REACHABLE))
1346 flow_delete_block (b), changed = true;
1347 }
1348
1349 if (changed)
1350 tidy_fallthru_edges ();
1351 return changed;
1352}
65f34de5 1353\f
1354/* Tidy the CFG by deleting unreachable code and whatnot. */
1355
1356bool
1357cleanup_cfg (mode)
1358 int mode;
1359{
65f34de5 1360 bool changed = false;
1361
1362 timevar_push (TV_CLEANUP_CFG);
1363 changed = delete_unreachable_blocks ();
1364 if (try_optimize_cfg (mode))
1365 delete_unreachable_blocks (), changed = true;
1366
65f34de5 1367 /* Kill the data we won't maintain. */
1368 free_EXPR_LIST_list (&label_value_list);
1369 free_EXPR_LIST_list (&tail_recursion_label_list);
1370 timevar_pop (TV_CLEANUP_CFG);
1371
65f34de5 1372 return changed;
1373}