/* Control flow optimization code for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains the control flow optimizer.  The main entry point is
   cleanup_cfg.  The following optimizations are performed:

   - Unreachable block removal
   - Edge forwarding (an edge to a forwarder block is forwarded to its
     successor.  Simplification of the branch instruction is performed by
     the underlying infrastructure so the branch can be converted to a
     simplejump or eliminated).
   - Cross jumping (tail merging)
   - Conditional jump-around-simplejump simplification
   - Basic block merging.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "cselib.h"
#include "params.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dce.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "regs.h"
#include "function-abi.h"

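/* A forwarder block does nothing but pass control on to its single
   successor.  BB_FORWARDER_BLOCK caches the result of forwarder_block_p
   (kept up to date by notice_new_block and update_forwarder_flag below),
   so this test is cheap.  */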
#define FORWARDER_BLOCK_P(BB) ((BB)->flags & BB_FORWARDER_BLOCK)

/* Set to true when we are running the first pass of the try_optimize_cfg
   loop.  */
static bool first_pass;

/* Set to true if crossjumps occurred in the latest run of
   try_optimize_cfg.  */
static bool crossjumps_occurred;

/* Set to true if we couldn't run an optimization due to stale liveness
   information; we should run df_analyze to enable more opportunities.  */
static bool block_was_dirty;

static bool try_crossjump_to_edge (int, edge, edge, enum replace_direction);
static bool try_crossjump_bb (int, basic_block);
static bool outgoing_edges_match (int, basic_block, basic_block);
static enum replace_direction old_insns_match_p (int, rtx_insn *, rtx_insn *);

static void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
static bool try_optimize_cfg (int);
static bool try_simplify_condjump (basic_block);
static bool try_forward_edges (int, basic_block);
static edge thread_jump (edge, basic_block);
static bool mark_effect (rtx, bitmap);
static void notice_new_block (basic_block);
static void update_forwarder_flag (basic_block);
static void merge_memattrs (rtx, rtx);

/* Set flags for a newly created block.  */

static void
notice_new_block (basic_block bb)
{
  if (!bb)
    return;

  if (forwarder_block_p (bb))
    bb->flags |= BB_FORWARDER_BLOCK;
}

/* Recompute the forwarder flag after a block has been modified.  */

static void
update_forwarder_flag (basic_block bb)
{
  if (forwarder_block_p (bb))
    bb->flags |= BB_FORWARDER_BLOCK;
  else
    bb->flags &= ~BB_FORWARDER_BLOCK;
}

/* Simplify a conditional jump around an unconditional jump.
   Return true if something changed.  */
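
/* Illustrative sketch of the transformation:

     if (cond) goto B;            if (!cond) goto C;
     goto C;               =>   B: ...
   B: ...

   The conditional branch is inverted to target the unconditional jump's
   destination C, control falls through into B, and the block that held
   "goto C" becomes dead and is deleted.  */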

static bool
try_simplify_condjump (basic_block cbranch_block)
{
  basic_block jump_block, jump_dest_block, cbranch_dest_block;
  edge cbranch_jump_edge, cbranch_fallthru_edge;
  rtx_insn *cbranch_insn;

  /* Verify that there are exactly two successors.  */
  if (EDGE_COUNT (cbranch_block->succs) != 2)
    return false;

  /* Verify that we've got a normal conditional branch at the end
     of the block.  */
  cbranch_insn = BB_END (cbranch_block);
  if (!any_condjump_p (cbranch_insn))
    return false;

  cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
  cbranch_jump_edge = BRANCH_EDGE (cbranch_block);

  /* The next block must not have multiple predecessors, must not
     be the last block in the function, and must contain just the
     unconditional jump.  */
  jump_block = cbranch_fallthru_edge->dest;
  if (!single_pred_p (jump_block)
      || jump_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || !FORWARDER_BLOCK_P (jump_block))
    return false;
  jump_dest_block = single_succ (jump_block);

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (jump_block) != BB_PARTITION (jump_dest_block)
      || (cbranch_jump_edge->flags & EDGE_CROSSING))
    return false;

  /* The conditional branch must target the block after the
     unconditional branch.  */
  cbranch_dest_block = cbranch_jump_edge->dest;

  if (cbranch_dest_block == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || jump_dest_block == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || !can_fallthru (jump_block, cbranch_dest_block))
    return false;

  /* Invert the conditional branch.  */
  if (!invert_jump (as_a <rtx_jump_insn *> (cbranch_insn),
                    block_label (jump_dest_block), 0))
    return false;

  if (dump_file)
    fprintf (dump_file, "Simplifying condjump %i around jump %i\n",
             INSN_UID (cbranch_insn), INSN_UID (BB_END (jump_block)));

  /* Success.  Update the CFG to match.  Note that after this point
     the edge variable names appear backwards; the redirection is done
     this way to preserve edge profile data.  */
  cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
                                                cbranch_dest_block);
  cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
                                                    jump_dest_block);
  cbranch_jump_edge->flags |= EDGE_FALLTHRU;
  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  update_br_prob_note (cbranch_block);

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  delete_basic_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge);
  update_forwarder_flag (cbranch_block);

  return true;
}
/* Attempt to prove that an operation is a NOOP using CSElib; otherwise
   mark its effect on the destination register.  Used by jump threading.  */
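
/* E.g. for (set (reg A) (reg B)), if cselib already knows that A and B
   hold the same value, the store is a no-op; otherwise A's registers are
   added to NONEQUAL.  A CLOBBER instead clears them from NONEQUAL, since
   a clobbered value is dead and need not match.  */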

static bool
mark_effect (rtx exp, regset nonequal)
{
  rtx dest;
  switch (GET_CODE (exp))
    {
    /* In case we do clobber the register, mark it as equal, as we know the
       value is dead so it doesn't have to match.  */
    case CLOBBER:
      dest = XEXP (exp, 0);
      if (REG_P (dest))
        bitmap_clear_range (nonequal, REGNO (dest), REG_NREGS (dest));
      return false;

    case SET:
      if (rtx_equal_for_cselib_p (SET_DEST (exp), SET_SRC (exp)))
        return false;
      dest = SET_DEST (exp);
      if (dest == pc_rtx)
        return false;
      if (!REG_P (dest))
        return true;
      bitmap_set_range (nonequal, REGNO (dest), REG_NREGS (dest));
      return false;

    default:
      return false;
    }
}

/* Return true if X contains a register in NONEQUAL.  */
static bool
mentions_nonequal_regs (const_rtx x, regset nonequal)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
        {
          unsigned int end_regno = END_REGNO (x);
          for (unsigned int regno = REGNO (x); regno < end_regno; ++regno)
            if (REGNO_REG_SET_P (nonequal, regno))
              return true;
        }
    }
  return false;
}

/* Attempt to prove that the basic block B will have no side effects and
   always continues along the same edge if reached via E.  Return that
   edge if it exists, NULL otherwise.  */
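
/* Illustrative sketch: if E->src ends in "if (x > 0) goto ..." and B ends
   in "if (x >= 0) goto ...", then reaching B along E fixes the outcome of
   B's branch (GT dominates GE), so E can be threaded directly to the
   corresponding successor of B, provided B itself is a no-op.  */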

static edge
thread_jump (edge e, basic_block b)
{
  rtx set1, set2, cond1, cond2;
  rtx_insn *insn;
  enum rtx_code code1, code2, reversed_code2;
  bool reverse1 = false;
  unsigned i;
  regset nonequal;
  bool failed = false;
  reg_set_iterator rsi;

  /* Jump threading may cause fixup_partitions to introduce new crossing edges,
     which is not allowed after reload.  */
  gcc_checking_assert (!reload_completed || !crtl->has_bb_partition);

  if (b->flags & BB_NONTHREADABLE_BLOCK)
    return NULL;

  /* At the moment, we handle only conditional jumps, but later we may
     want to extend this code to tablejumps and others.  */
  if (EDGE_COUNT (e->src->succs) != 2)
    return NULL;
  if (EDGE_COUNT (b->succs) != 2)
    {
      b->flags |= BB_NONTHREADABLE_BLOCK;
      return NULL;
    }

  /* Second branch must end with onlyjump, as we will eliminate the jump.  */
  if (!any_condjump_p (BB_END (e->src)))
    return NULL;

  if (!any_condjump_p (BB_END (b)) || !onlyjump_p (BB_END (b)))
    {
      b->flags |= BB_NONTHREADABLE_BLOCK;
      return NULL;
    }

  set1 = pc_set (BB_END (e->src));
  set2 = pc_set (BB_END (b));
  if (((e->flags & EDGE_FALLTHRU) != 0)
      != (XEXP (SET_SRC (set1), 1) == pc_rtx))
    reverse1 = true;

  cond1 = XEXP (SET_SRC (set1), 0);
  cond2 = XEXP (SET_SRC (set2), 0);
  if (reverse1)
    code1 = reversed_comparison_code (cond1, BB_END (e->src));
  else
    code1 = GET_CODE (cond1);

  code2 = GET_CODE (cond2);
  reversed_code2 = reversed_comparison_code (cond2, BB_END (b));

  if (!comparison_dominates_p (code1, code2)
      && !comparison_dominates_p (code1, reversed_code2))
    return NULL;

  /* Ensure that the comparison operators are equivalent.
     ??? This is far too pessimistic.  We should allow swapped operands,
     different CCmodes, or for example comparisons for interval, that
     dominate even when operands are not equivalent.  */
  if (!rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
      || !rtx_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
    return NULL;

  /* Punt if BB_END (e->src) is a doloop-like conditional jump that modifies
     the registers used in cond1.  */
  if (modified_in_p (cond1, BB_END (e->src)))
    return NULL;

  /* Short circuit cases where block B contains some side effects, as we can't
     safely bypass it.  */
  for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b));
       insn = NEXT_INSN (insn))
    if (INSN_P (insn) && side_effects_p (PATTERN (insn)))
      {
        b->flags |= BB_NONTHREADABLE_BLOCK;
        return NULL;
      }

  cselib_init (0);

  /* First process all values computed in the source basic block.  */
  for (insn = NEXT_INSN (BB_HEAD (e->src));
       insn != NEXT_INSN (BB_END (e->src));
       insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      cselib_process_insn (insn);

  nonequal = BITMAP_ALLOC (NULL);
  CLEAR_REG_SET (nonequal);

  /* Now assume that we've continued by the edge E to B and continue
     processing as if it were the same basic block.
     Our goal is to prove that the whole block is a NOOP.  */

  for (insn = NEXT_INSN (BB_HEAD (b));
       insn != NEXT_INSN (BB_END (b)) && !failed;
       insn = NEXT_INSN (insn))
    {
      /* cond2 must not mention any register that is not equal to the
         former block.  Check this before processing that instruction,
         as BB_END (b) could also contain clobbers.  */
      if (insn == BB_END (b)
          && mentions_nonequal_regs (cond2, nonequal))
        goto failed_exit;

      if (INSN_P (insn))
        {
          rtx pat = PATTERN (insn);

          if (GET_CODE (pat) == PARALLEL)
            {
              for (i = 0; i < (unsigned)XVECLEN (pat, 0); i++)
                failed |= mark_effect (XVECEXP (pat, 0, i), nonequal);
            }
          else
            failed |= mark_effect (pat, nonequal);
        }

      cselib_process_insn (insn);
    }

  /* Later we should clear nonequal of dead registers.  So far we don't
     have life information in cfg_cleanup.  */
  if (failed)
    {
      b->flags |= BB_NONTHREADABLE_BLOCK;
      goto failed_exit;
    }

  EXECUTE_IF_SET_IN_REG_SET (nonequal, 0, i, rsi)
    goto failed_exit;

  BITMAP_FREE (nonequal);
  cselib_finish ();
  if ((comparison_dominates_p (code1, code2) != 0)
      != (XEXP (SET_SRC (set2), 1) == pc_rtx))
    return BRANCH_EDGE (b);
  else
    return FALLTHRU_EDGE (b);

failed_exit:
  BITMAP_FREE (nonequal);
  cselib_finish ();
  return NULL;
}
/* Attempt to forward edges leaving basic block B.
   Return true if successful.  */
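
/* Illustrative sketch: if an edge from B reaches a forwarder block F whose
   only job is to jump to T, the edge is redirected to T directly; chains of
   forwarders collapse the same way, with COUNTER bounding the walk so loops
   of forwarders cannot make it spin forever.  With CLEANUP_THREADING,
   thread_jump is also consulted, so a conditional in the target whose
   outcome is already determined can be bypassed as well.  */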

static bool
try_forward_edges (int mode, basic_block b)
{
  bool changed = false;
  edge_iterator ei;
  edge e, *threaded_edges = NULL;

  for (ei = ei_start (b->succs); (e = ei_safe_edge (ei)); )
    {
      basic_block target, first;
      location_t goto_locus;
      int counter;
      bool threaded = false;
      int nthreaded_edges = 0;
      bool may_thread = first_pass || (b->flags & BB_MODIFIED) != 0;
      bool new_target_threaded = false;

      /* Skip complex edges because we don't know how to update them.

         Still handle fallthru edges, as we can succeed in forwarding a
         fallthru edge to the same place as the branch edge of a conditional
         branch and turn the conditional branch into an unconditional one.  */
      if (e->flags & EDGE_COMPLEX)
        {
          ei_next (&ei);
          continue;
        }

      target = first = e->dest;
      counter = NUM_FIXED_BLOCKS;
      goto_locus = e->goto_locus;

      while (counter < n_basic_blocks_for_fn (cfun))
        {
          basic_block new_target = NULL;
          may_thread |= (target->flags & BB_MODIFIED) != 0;

          if (FORWARDER_BLOCK_P (target)
              && single_succ (target) != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              /* Bypass trivial infinite loops.  */
              new_target = single_succ (target);
              if (target == new_target)
                counter = n_basic_blocks_for_fn (cfun);
              else if (!optimize)
                {
                  /* When not optimizing, ensure that edges or forwarder
                     blocks with different locus are not optimized out.  */
                  location_t new_locus = single_succ_edge (target)->goto_locus;
                  location_t locus = goto_locus;

                  if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION
                      && LOCATION_LOCUS (locus) != UNKNOWN_LOCATION
                      && new_locus != locus)
                    new_target = NULL;
                  else
                    {
                      if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION)
                        locus = new_locus;

                      rtx_insn *last = BB_END (target);
                      if (DEBUG_INSN_P (last))
                        last = prev_nondebug_insn (last);
                      if (last && INSN_P (last))
                        new_locus = INSN_LOCATION (last);
                      else
                        new_locus = UNKNOWN_LOCATION;

                      if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION
                          && LOCATION_LOCUS (locus) != UNKNOWN_LOCATION
                          && new_locus != locus)
                        new_target = NULL;
                      else
                        {
                          if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION)
                            locus = new_locus;

                          goto_locus = locus;
                        }
                    }
                }
            }

          /* Allow threading over at most one edge at a time to simplify
             updating of probabilities.  */
          else if ((mode & CLEANUP_THREADING) && may_thread)
            {
              edge t = thread_jump (e, target);
              if (t)
                {
                  if (!threaded_edges)
                    threaded_edges = XNEWVEC (edge,
                                              n_basic_blocks_for_fn (cfun));
                  else
                    {
                      int i;

                      /* Detect an infinite loop across blocks not
                         including the start block.  */
                      for (i = 0; i < nthreaded_edges; ++i)
                        if (threaded_edges[i] == t)
                          break;
                      if (i < nthreaded_edges)
                        {
                          counter = n_basic_blocks_for_fn (cfun);
                          break;
                        }
                    }

                  /* Detect an infinite loop across the start block.  */
                  if (t->dest == b)
                    break;

                  gcc_assert (nthreaded_edges
                              < (n_basic_blocks_for_fn (cfun)
                                 - NUM_FIXED_BLOCKS));
                  threaded_edges[nthreaded_edges++] = t;

                  new_target = t->dest;
                  new_target_threaded = true;
                }
            }

          if (!new_target)
            break;

          counter++;
          /* Do not turn a non-crossing jump into a crossing one.  Depending
             on the target it may require a different instruction pattern.  */
          if ((e->flags & EDGE_CROSSING)
              || BB_PARTITION (first) == BB_PARTITION (new_target))
            {
              target = new_target;
              threaded |= new_target_threaded;
            }
        }

      if (counter >= n_basic_blocks_for_fn (cfun))
        {
          if (dump_file)
            fprintf (dump_file, "Infinite loop in BB %i.\n",
                     target->index);
        }
      else if (target == first)
        ; /* We didn't do anything.  */
      else
        {
          /* Save the values now, as the edge may get removed.  */
          profile_count edge_count = e->count ();
          int n = 0;

          e->goto_locus = goto_locus;

          /* Don't force if the target is the exit block.  */
          if (threaded && target != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              notice_new_block (redirect_edge_and_branch_force (e, target));
              if (dump_file)
                fprintf (dump_file, "Conditionals threaded.\n");
            }
          else if (!redirect_edge_and_branch (e, target))
            {
              if (dump_file)
                fprintf (dump_file,
                         "Forwarding edge %i->%i to %i failed.\n",
                         b->index, e->dest->index, target->index);
              ei_next (&ei);
              continue;
            }

          /* We successfully forwarded the edge.  Now update profile
             data: for each edge we traversed in the chain, remove
             the original edge's execution count.  */
          do
            {
              edge t;

              if (!single_succ_p (first))
                {
                  gcc_assert (n < nthreaded_edges);
                  t = threaded_edges [n++];
                  gcc_assert (t->src == first);
                  update_bb_profile_for_threading (first, edge_count, t);
                  update_br_prob_note (first);
                }
              else
                {
                  first->count -= edge_count;
                  /* It is possible that as the result of
                     threading we've removed the edge as it is
                     threaded to the fallthru edge.  Avoid
                     getting out of sync.  */
                  if (n < nthreaded_edges
                      && first == threaded_edges [n]->src)
                    n++;
                  t = single_succ_edge (first);
                }

              first = t->dest;
            }
          while (first != target);

          changed = true;
          continue;
        }
      ei_next (&ei);
    }

  free (threaded_edges);
  return changed;
}

/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static void
merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
{
  rtx_insn *barrier;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return;

  barrier = next_nonnote_insn (BB_END (a));
  gcc_assert (BARRIER_P (barrier));
  delete_insn (barrier);

  /* Scramble the insn chain.  */
  if (BB_END (a) != PREV_INSN (BB_HEAD (b)))
    reorder_insns_nobb (BB_HEAD (a), BB_END (a), PREV_INSN (BB_HEAD (b)));
  df_set_bb_dirty (a);

  if (dump_file)
    fprintf (dump_file, "Moved block %d before %d and merged.\n",
             a->index, b->index);

  /* Swap the records for the two blocks around.  */

  unlink_block (a);
  link_block (a, b->prev_bb);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks (a, b);
}

/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static void
merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
{
  rtx_insn *barrier, *real_b_end;
  rtx_insn *label;
  rtx_jump_table_data *table;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return;

  real_b_end = BB_END (b);

  /* If there is a jump table following block B temporarily add the jump table
     to block B so that it will also be moved to the correct location.  */
  if (tablejump_p (BB_END (b), &label, &table)
      && prev_active_insn (label) == BB_END (b))
    {
      BB_END (b) = table;
    }

  /* There had better have been a barrier there.  Delete it.  */
  barrier = NEXT_INSN (BB_END (b));
  if (barrier && BARRIER_P (barrier))
    delete_insn (barrier);

  /* Scramble the insn chain.  */
  reorder_insns_nobb (BB_HEAD (b), BB_END (b), BB_END (a));

  /* Restore the real end of b.  */
  BB_END (b) = real_b_end;

  if (dump_file)
    fprintf (dump_file, "Moved block %d after %d and merged.\n",
             b->index, a->index);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks (a, b);
}

/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return NULL iff the attempt failed, otherwise return the basic block
   where cleanup_cfg should continue.  Because the merging commonly
   moves a basic block away or introduces another optimization
   possibility, return the basic block just before B so cleanup_cfg
   doesn't need to iterate.

   It may be a good idea to return the basic block before C in the case
   C has been moved after B and originally appeared earlier in the
   insn sequence, but we have no information available about the
   relative ordering of these two.  Hopefully it is not too common.  */

static basic_block
merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
{
  basic_block next;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (b) != BB_PARTITION (c))
    return NULL;

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      int b_index = b->index, c_index = c->index;

      /* Protect the loop latches.  */
      if (current_loops && c->loop_father->latch == c)
        return NULL;

      merge_blocks (b, c);
      update_forwarder_flag (b);

      if (dump_file)
        fprintf (dump_file, "Merged %d and %d without moving.\n",
                 b_index, c_index);

      return b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? b : b->prev_bb;
    }

  /* Otherwise we will need to move code around.  Do that only if expensive
     transformations are allowed.  */
  else if (mode & CLEANUP_EXPENSIVE)
    {
      edge tmp_edge, b_fallthru_edge;
      bool c_has_outgoing_fallthru;
      bool b_has_incoming_fallthru;

      /* Avoid overactive code motion, as the forwarder blocks should be
         eliminated by edge redirection instead.  One exception might have
         been if B is a forwarder block and C has no fallthru edge, but
         that should be cleaned up by bb-reorder instead.  */
      if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
        return NULL;

      /* We must make sure to not munge nesting of lexical blocks,
         and loop notes.  This is done by squeezing out all the notes
         and leaving them there to lie.  Not ideal, but functional.  */

      tmp_edge = find_fallthru_edge (c->succs);
      c_has_outgoing_fallthru = (tmp_edge != NULL);

      tmp_edge = find_fallthru_edge (b->preds);
      b_has_incoming_fallthru = (tmp_edge != NULL);
      b_fallthru_edge = tmp_edge;
      next = b->prev_bb;
      if (next == c)
        next = next->prev_bb;

      /* Otherwise, we're going to try to move C after B.  If C does
         not have an outgoing fallthru, then it can be moved
         immediately after B without introducing or modifying jumps.  */
      if (! c_has_outgoing_fallthru)
        {
          merge_blocks_move_successor_nojumps (b, c);
          return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
        }

      /* If B does not have an incoming fallthru, then it can be moved
         immediately before C without introducing or modifying jumps.
         C cannot be the first block, so we do not have to worry about
         accessing a non-existent block.  */

      if (b_has_incoming_fallthru)
        {
          basic_block bb;

          if (b_fallthru_edge->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            return NULL;
          bb = force_nonfallthru (b_fallthru_edge);
          if (bb)
            notice_new_block (bb);
        }

      merge_blocks_move_predecessor_nojumps (b, c);
      return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
    }

  return NULL;
}

/* Remove the memory attributes of MEM expressions X and Y
   where they are not equal.  */
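
/* E.g. if X and Y disagree on MEM_ALIAS_SET, the alias sets of both are
   reset to 0; a mismatched MEM_EXPR or offset is dropped from both; sizes
   are widened to the larger of the two when comparable (otherwise dropped);
   and alignment drops to the smaller of the two.  The surviving attributes
   are thus valid for either access.  */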

static void
merge_memattrs (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return;
  if (x == 0 || y == 0)
    return;

  code = GET_CODE (x);

  if (code != GET_CODE (y))
    return;

  if (GET_MODE (x) != GET_MODE (y))
    return;

  if (code == MEM && !mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
    {
      if (! MEM_ATTRS (x))
        MEM_ATTRS (y) = 0;
      else if (! MEM_ATTRS (y))
        MEM_ATTRS (x) = 0;
      else
        {
          if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
            {
              set_mem_alias_set (x, 0);
              set_mem_alias_set (y, 0);
            }

          if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
            {
              set_mem_expr (x, 0);
              set_mem_expr (y, 0);
              clear_mem_offset (x);
              clear_mem_offset (y);
            }
          else if (MEM_OFFSET_KNOWN_P (x) != MEM_OFFSET_KNOWN_P (y)
                   || (MEM_OFFSET_KNOWN_P (x)
                       && maybe_ne (MEM_OFFSET (x), MEM_OFFSET (y))))
            {
              clear_mem_offset (x);
              clear_mem_offset (y);
            }

          if (!MEM_SIZE_KNOWN_P (x))
            clear_mem_size (y);
          else if (!MEM_SIZE_KNOWN_P (y))
            clear_mem_size (x);
          else if (known_le (MEM_SIZE (x), MEM_SIZE (y)))
            set_mem_size (x, MEM_SIZE (y));
          else if (known_le (MEM_SIZE (y), MEM_SIZE (x)))
            set_mem_size (y, MEM_SIZE (x));
          else
            {
              /* The sizes aren't ordered, so we can't merge them.  */
              clear_mem_size (x);
              clear_mem_size (y);
            }

          set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
          set_mem_align (y, MEM_ALIGN (x));
        }
    }
  if (code == MEM)
    {
      if (MEM_READONLY_P (x) != MEM_READONLY_P (y))
        {
          MEM_READONLY_P (x) = 0;
          MEM_READONLY_P (y) = 0;
        }
      if (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y))
        {
          MEM_NOTRAP_P (x) = 0;
          MEM_NOTRAP_P (y) = 0;
        }
      if (MEM_VOLATILE_P (x) != MEM_VOLATILE_P (y))
        {
          MEM_VOLATILE_P (x) = 1;
          MEM_VOLATILE_P (y) = 1;
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return;

          for (j = 0; j < XVECLEN (x, i); j++)
            merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));

          break;

        case 'e':
          merge_memattrs (XEXP (x, i), XEXP (y, i));
        }
    }
  return;
}

/* Checks if patterns P1 and P2 are equivalent, apart from the possibly
   different single sets S1 and S2.  */

static bool
equal_different_set_p (rtx p1, rtx s1, rtx p2, rtx s2)
{
  int i;
  rtx e1, e2;

  if (p1 == s1 && p2 == s2)
    return true;

  if (GET_CODE (p1) != PARALLEL || GET_CODE (p2) != PARALLEL)
    return false;

  if (XVECLEN (p1, 0) != XVECLEN (p2, 0))
    return false;

  for (i = 0; i < XVECLEN (p1, 0); i++)
    {
      e1 = XVECEXP (p1, 0, i);
      e2 = XVECEXP (p2, 0, i);
      if (e1 == s1 && e2 == s2)
        continue;
      if (reload_completed
          ? rtx_renumbered_equal_p (e1, e2) : rtx_equal_p (e1, e2))
        continue;

      return false;
    }

  return true;
}

/* NOTE1 is the REG_EQUAL note, if any, attached to an insn
   that is a single_set with a SET_SRC of SRC1.  Similarly
   for NOTE2/SRC2.

   So effectively NOTE1/NOTE2 are an alternate form of
   SRC1/SRC2 respectively.

   Return nonzero if SRC1 or NOTE1 has the same constant
   integer value as SRC2 or NOTE2.  Else return zero.  */
static int
values_equal_p (rtx note1, rtx note2, rtx src1, rtx src2)
{
  if (note1
      && note2
      && CONST_INT_P (XEXP (note1, 0))
      && rtx_equal_p (XEXP (note1, 0), XEXP (note2, 0)))
    return 1;

  if (!note1
      && !note2
      && CONST_INT_P (src1)
      && CONST_INT_P (src2)
      && rtx_equal_p (src1, src2))
    return 1;

  if (note1
      && CONST_INT_P (src2)
      && rtx_equal_p (XEXP (note1, 0), src2))
    return 1;

  if (note2
      && CONST_INT_P (src1)
      && rtx_equal_p (XEXP (note2, 0), src1))
    return 1;

  return 0;
}

/* Examine register notes on I1 and I2 and return:
   - dir_forward if I1 can be replaced by I2, or
   - dir_backward if I2 can be replaced by I1, or
   - dir_both if both are the case.  */
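
/* E.g. if I1 is (set (reg A) (const_int 1)) and I2 is
   (set (reg A) (reg B)) carrying a REG_EQUAL note of (const_int 1),
   the result is dir_backward: I2 may be replaced by I1, but not the
   other way around, because B's value at I1's location is unknown.  */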

static enum replace_direction
can_replace_by (rtx_insn *i1, rtx_insn *i2)
{
  rtx s1, s2, d1, d2, src1, src2, note1, note2;
  bool c1, c2;

  /* Check for 2 sets.  */
  s1 = single_set (i1);
  s2 = single_set (i2);
  if (s1 == NULL_RTX || s2 == NULL_RTX)
    return dir_none;

  /* Check that the 2 sets set the same dest.  */
  d1 = SET_DEST (s1);
  d2 = SET_DEST (s2);
  if (!(reload_completed
        ? rtx_renumbered_equal_p (d1, d2) : rtx_equal_p (d1, d2)))
    return dir_none;

  /* Find identical reg_equiv or reg_equal note, which implies that the 2 sets
     set dest to the same value.  */
  note1 = find_reg_equal_equiv_note (i1);
  note2 = find_reg_equal_equiv_note (i2);

  src1 = SET_SRC (s1);
  src2 = SET_SRC (s2);

  if (!values_equal_p (note1, note2, src1, src2))
    return dir_none;

  if (!equal_different_set_p (PATTERN (i1), s1, PATTERN (i2), s2))
    return dir_none;

  /* Although the 2 sets set dest to the same value, we cannot replace
       (set (dest) (const_int))
     by
       (set (dest) (reg))
     because we don't know if the reg is live and has the same value at the
     location of replacement.  */
  c1 = CONST_INT_P (src1);
  c2 = CONST_INT_P (src2);
  if (c1 && c2)
    return dir_both;
  else if (c2)
    return dir_forward;
  else if (c1)
    return dir_backward;

  return dir_none;
}

/* Merges directions A and B.  */

static enum replace_direction
merge_dir (enum replace_direction a, enum replace_direction b)
{
  /* Implements the following table:
        |bo fw bw no
     ---+-----------
     bo |bo fw bw no
     fw |-- fw no no
     bw |-- -- bw no
     no |-- -- -- no.  */

  if (a == b)
    return a;

  if (a == dir_both)
    return b;
  if (b == dir_both)
    return a;

  return dir_none;
}

/* Array of flags indexed by reg note kind, true if the given
   reg note is CFA related.  */
static const bool reg_note_cfa_p[] = {
#undef REG_CFA_NOTE
#define DEF_REG_NOTE(NAME) false,
#define REG_CFA_NOTE(NAME) true,
#include "reg-notes.def"
#undef REG_CFA_NOTE
#undef DEF_REG_NOTE
  false
};

/* Return true if I1 and I2 have identical CFA notes (the same order
   and equivalent content).  */

static bool
insns_have_identical_cfa_notes (rtx_insn *i1, rtx_insn *i2)
{
  rtx n1, n2;
  for (n1 = REG_NOTES (i1), n2 = REG_NOTES (i2); ;
       n1 = XEXP (n1, 1), n2 = XEXP (n2, 1))
    {
      /* Skip over reg notes not related to CFI information.  */
      while (n1 && !reg_note_cfa_p[REG_NOTE_KIND (n1)])
        n1 = XEXP (n1, 1);
      while (n2 && !reg_note_cfa_p[REG_NOTE_KIND (n2)])
        n2 = XEXP (n2, 1);
      if (n1 == NULL_RTX && n2 == NULL_RTX)
        return true;
      if (n1 == NULL_RTX || n2 == NULL_RTX)
        return false;
      if (XEXP (n1, 0) == XEXP (n2, 0))
        ;
      else if (XEXP (n1, 0) == NULL_RTX || XEXP (n2, 0) == NULL_RTX)
        return false;
      else if (!(reload_completed
                 ? rtx_renumbered_equal_p (XEXP (n1, 0), XEXP (n2, 0))
                 : rtx_equal_p (XEXP (n1, 0), XEXP (n2, 0))))
        return false;
    }
}

/* Examine I1 and I2 and return:
   - dir_forward if I1 can be replaced by I2, or
   - dir_backward if I2 can be replaced by I1, or
   - dir_both if both are the case.  */
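
/* E.g. insns with matching patterns (and matching EH regions, notes,
   etc.) compare as dir_both; insns with different patterns may still be
   replaceable one way via can_replace_by when their REG_EQUAL notes prove
   they compute the same value.  */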

static enum replace_direction
old_insns_match_p (int mode ATTRIBUTE_UNUSED, rtx_insn *i1, rtx_insn *i2)
{
  rtx p1, p2;

  /* Verify that I1 and I2 are equivalent.  */
  if (GET_CODE (i1) != GET_CODE (i2))
    return dir_none;

  /* __builtin_unreachable() may lead to empty blocks (ending with
     NOTE_INSN_BASIC_BLOCK).  They may be crossjumped.  */
  if (NOTE_INSN_BASIC_BLOCK_P (i1) && NOTE_INSN_BASIC_BLOCK_P (i2))
    return dir_both;

  /* ??? Do not allow cross-jumping between different stack levels.  */
  p1 = find_reg_note (i1, REG_ARGS_SIZE, NULL);
  p2 = find_reg_note (i2, REG_ARGS_SIZE, NULL);
  if (p1 && p2)
    {
      p1 = XEXP (p1, 0);
      p2 = XEXP (p2, 0);
      if (!rtx_equal_p (p1, p2))
        return dir_none;

      /* ??? Worse, this adjustment had better be constant lest we
         have differing incoming stack levels.  */
      if (!frame_pointer_needed
          && known_eq (find_args_size_adjust (i1), HOST_WIDE_INT_MIN))
        return dir_none;
    }
  else if (p1 || p2)
    return dir_none;

  /* Do not allow cross-jumping between frame related insns and other
     insns.  */
  if (RTX_FRAME_RELATED_P (i1) != RTX_FRAME_RELATED_P (i2))
    return dir_none;

  p1 = PATTERN (i1);
  p2 = PATTERN (i2);

  if (GET_CODE (p1) != GET_CODE (p2))
    return dir_none;

  /* If this is a CALL_INSN, compare register usage information.
     If we don't check this on stack register machines, the two
     CALL_INSNs might be merged leaving reg-stack.c with mismatching
     numbers of stack registers in the same basic block.
     If we don't check this on machines with delay slots, a delay slot may
     be filled that clobbers a parameter expected by the subroutine.

     ??? We take the simple route for now and assume that if they're
     equal, they were constructed identically.

     Also check for identical exception regions.  */

  if (CALL_P (i1))
    {
      /* Ensure the same EH region.  */
      rtx n1 = find_reg_note (i1, REG_EH_REGION, 0);
      rtx n2 = find_reg_note (i2, REG_EH_REGION, 0);

      if (!n1 && n2)
        return dir_none;

      if (n1 && (!n2 || XEXP (n1, 0) != XEXP (n2, 0)))
        return dir_none;

      if (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
                        CALL_INSN_FUNCTION_USAGE (i2))
          || SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2))
        return dir_none;

      /* For address sanitizer, never crossjump __asan_report_* builtins,
         otherwise errors might be reported on incorrect lines.  */
      if (flag_sanitize & SANITIZE_ADDRESS)
        {
          rtx call = get_call_rtx_from (i1);
          if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
            {
              rtx symbol = XEXP (XEXP (call, 0), 0);
              if (SYMBOL_REF_DECL (symbol)
                  && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
                {
                  if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
                       == BUILT_IN_NORMAL)
                      && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
                         >= BUILT_IN_ASAN_REPORT_LOAD1
                      && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
                         <= BUILT_IN_ASAN_STOREN)
                    return dir_none;
                }
            }
        }

      if (insn_callee_abi (i1) != insn_callee_abi (i2))
        return dir_none;
    }

  /* If both i1 and i2 are frame related, verify all the CFA notes
     in the same order and with the same content.  */
  if (RTX_FRAME_RELATED_P (i1) && !insns_have_identical_cfa_notes (i1, i2))
    return dir_none;

#ifdef STACK_REGS
  /* If cross_jump_death_matters is not 0, the insn's mode
     indicates whether or not the insn contains any stack-like
     regs.  */

  if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
    {
      /* If register stack conversion has already been done, then
         death notes must also be compared before it is certain that
         the two instruction streams match.  */

      rtx note;
      HARD_REG_SET i1_regset, i2_regset;

      CLEAR_HARD_REG_SET (i1_regset);
      CLEAR_HARD_REG_SET (i2_regset);

      for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
        if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
          SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

      for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
        if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
          SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

      if (i1_regset != i2_regset)
        return dir_none;
    }
#endif

  if (reload_completed
      ? rtx_renumbered_equal_p (p1, p2) : rtx_equal_p (p1, p2))
    return dir_both;

  return can_replace_by (i1, i2);
}
/* When comparing insns I1 and I2 in flow_find_cross_jump or
   flow_find_head_matching_sequence, ensure the notes match.  */

static void
merge_notes (rtx_insn *i1, rtx_insn *i2)
{
  /* If the merged insns have different REG_EQUAL notes, then
     remove them.  */
  rtx equiv1 = find_reg_equal_equiv_note (i1);
  rtx equiv2 = find_reg_equal_equiv_note (i2);

  if (equiv1 && !equiv2)
    remove_note (i1, equiv1);
  else if (!equiv1 && equiv2)
    remove_note (i2, equiv2);
  else if (equiv1 && equiv2
           && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
    {
      remove_note (i1, equiv1);
      remove_note (i2, equiv2);
    }
}

/* Walks from I1 in BB1 backward till the next non-debug insn, and returns the
   resulting insn in I1, and the corresponding bb in BB1.  At the head of a
   bb, if there is a predecessor bb that reaches this bb via fallthru, and
   FOLLOW_FALLTHRU, walks further in the predecessor bb and registers this in
   DID_FALLTHRU.  Otherwise, stops at the head of the bb.  */

static void
walk_to_nondebug_insn (rtx_insn **i1, basic_block *bb1, bool follow_fallthru,
                       bool *did_fallthru)
{
  edge fallthru;

  *did_fallthru = false;

  /* Ignore notes.  */
  while (!NONDEBUG_INSN_P (*i1))
    {
      if (*i1 != BB_HEAD (*bb1))
        {
          *i1 = PREV_INSN (*i1);
          continue;
        }

      if (!follow_fallthru)
        return;

      fallthru = find_fallthru_edge ((*bb1)->preds);
      if (!fallthru || fallthru->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
          || !single_succ_p (fallthru->src))
        return;

      *bb1 = fallthru->src;
      *i1 = BB_END (*bb1);
      *did_fallthru = true;
    }
}

/* Look through the insns at the end of BB1 and BB2 and find the longest
   sequence of insns that are either equivalent, or allow forward or backward
   replacement.  Store the first insns for that sequence in *F1 and *F2 and
   return the sequence length.

   DIR_P indicates the allowed replacement direction on function entry, and
   the actual replacement direction on function exit.  If NULL, only equivalent
   sequences are allowed.

   To simplify callers of this function, if the blocks match exactly,
   store the head of the blocks in *F1 and *F2.  */

int
flow_find_cross_jump (basic_block bb1, basic_block bb2, rtx_insn **f1,
                      rtx_insn **f2, enum replace_direction *dir_p)
{
  rtx_insn *i1, *i2, *last1, *last2, *afterlast1, *afterlast2;
  int ninsns = 0;
  enum replace_direction dir, last_dir, afterlast_dir;
  bool follow_fallthru, did_fallthru;

  if (dir_p)
    dir = *dir_p;
  else
    dir = dir_both;
  afterlast_dir = dir;
  last_dir = afterlast_dir;

  /* Skip simple jumps at the end of the blocks.  Complex jumps still
     need to be compared for equivalence, which we'll do below.  */

  i1 = BB_END (bb1);
  last1 = afterlast1 = last2 = afterlast2 = NULL;
  if (onlyjump_p (i1)
      || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
    {
      last1 = i1;
      i1 = PREV_INSN (i1);
    }

  i2 = BB_END (bb2);
  if (onlyjump_p (i2)
      || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
    {
      last2 = i2;
      /* Count everything except for unconditional jump as insn.
         Don't count any jumps if dir_p is NULL.  */
      if (!simplejump_p (i2) && !returnjump_p (i2) && last1 && dir_p)
        ninsns++;
      i2 = PREV_INSN (i2);
    }

  while (true)
    {
      /* In the following example, we can replace all jumps to C by jumps to A.

         This removes 4 duplicate insns.
         [bb A] insn1         [bb C] insn1
                insn2                insn2
         [bb B] insn3                insn3
                insn4                insn4
                jump_insn            jump_insn

         We could also replace all jumps to A by jumps to C, but that leaves B
         alive, and removes only 2 duplicate insns.  In a subsequent crossjump
         step, all jumps to B would be replaced with jumps to the middle of C,
         achieving the same result with more effort.
         So we allow only the first possibility, which means that we don't
         allow fallthru in the block that's being replaced.  */

      follow_fallthru = dir_p && dir != dir_forward;
      walk_to_nondebug_insn (&i1, &bb1, follow_fallthru, &did_fallthru);
      if (did_fallthru)
        dir = dir_backward;

      follow_fallthru = dir_p && dir != dir_backward;
      walk_to_nondebug_insn (&i2, &bb2, follow_fallthru, &did_fallthru);
      if (did_fallthru)
        dir = dir_forward;

      if (i1 == BB_HEAD (bb1) || i2 == BB_HEAD (bb2))
        break;

      /* Do not turn a crossing edge to non-crossing or vice versa after
         reload.  */
      if (BB_PARTITION (BLOCK_FOR_INSN (i1))
          != BB_PARTITION (BLOCK_FOR_INSN (i2))
          && reload_completed)
        break;

      dir = merge_dir (dir, old_insns_match_p (0, i1, i2));
      if (dir == dir_none || (!dir_p && dir != dir_both))
        break;

      merge_memattrs (i1, i2);

      /* Don't begin a cross-jump with a NOTE insn.  */
      if (INSN_P (i1))
        {
          merge_notes (i1, i2);

          afterlast1 = last1, afterlast2 = last2;
          last1 = i1, last2 = i2;
          afterlast_dir = last_dir;
          last_dir = dir;
          if (active_insn_p (i1))
            ninsns++;
        }

      i1 = PREV_INSN (i1);
      i2 = PREV_INSN (i2);
    }

  /* Don't allow the insn after a compare to be shared by
     cross-jumping unless the compare is also shared.  */
  if (HAVE_cc0 && ninsns && reg_mentioned_p (cc0_rtx, last1)
      && ! sets_cc0_p (last1))
    last1 = afterlast1, last2 = afterlast2, last_dir = afterlast_dir, ninsns--;

  /* Include preceding notes and labels in the cross-jump.  One,
     this may bring us to the head of the blocks as requested above.
     Two, it keeps line number notes as matched as may be.  */
  if (ninsns)
    {
      bb1 = BLOCK_FOR_INSN (last1);
      while (last1 != BB_HEAD (bb1) && !NONDEBUG_INSN_P (PREV_INSN (last1)))
        last1 = PREV_INSN (last1);

      if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1)))
        last1 = PREV_INSN (last1);

      bb2 = BLOCK_FOR_INSN (last2);
      while (last2 != BB_HEAD (bb2) && !NONDEBUG_INSN_P (PREV_INSN (last2)))
        last2 = PREV_INSN (last2);

      if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2)))
        last2 = PREV_INSN (last2);

      *f1 = last1;
      *f2 = last2;
    }

  if (dir_p)
    *dir_p = last_dir;
  return ninsns;
}

/* Like flow_find_cross_jump, except start looking for a matching sequence from
   the head of the two blocks.  Do not include jumps at the end.
   If STOP_AFTER is nonzero, stop after finding that many matching
   instructions.  If STOP_AFTER is zero, count all INSN_P insns; if it is
   nonzero, only count active insns.  */
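
/* E.g. with BB1 = { a; b; c; jump L1 } and BB2 = { a; b; d; jump L2 },
   the matching head is "a; b", the return value is 2, and *F1/*F2 are
   left pointing at the two "b" insns (the last matched pair).  */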

int
flow_find_head_matching_sequence (basic_block bb1, basic_block bb2, rtx_insn **f1,
                                  rtx_insn **f2, int stop_after)
{
  rtx_insn *i1, *i2, *last1, *last2, *beforelast1, *beforelast2;
  int ninsns = 0;
  edge e;
  edge_iterator ei;
  int nehedges1 = 0, nehedges2 = 0;

  FOR_EACH_EDGE (e, ei, bb1->succs)
    if (e->flags & EDGE_EH)
      nehedges1++;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EH)
      nehedges2++;

  i1 = BB_HEAD (bb1);
  i2 = BB_HEAD (bb2);
  last1 = beforelast1 = last2 = beforelast2 = NULL;

  while (true)
    {
      /* Ignore notes, except NOTE_INSN_EPILOGUE_BEG.  */
      while (!NONDEBUG_INSN_P (i1) && i1 != BB_END (bb1))
        {
          if (NOTE_P (i1) && NOTE_KIND (i1) == NOTE_INSN_EPILOGUE_BEG)
            break;
          i1 = NEXT_INSN (i1);
        }

      while (!NONDEBUG_INSN_P (i2) && i2 != BB_END (bb2))
        {
          if (NOTE_P (i2) && NOTE_KIND (i2) == NOTE_INSN_EPILOGUE_BEG)
            break;
          i2 = NEXT_INSN (i2);
        }

      if ((i1 == BB_END (bb1) && !NONDEBUG_INSN_P (i1))
          || (i2 == BB_END (bb2) && !NONDEBUG_INSN_P (i2)))
        break;

      if (NOTE_P (i1) || NOTE_P (i2)
          || JUMP_P (i1) || JUMP_P (i2))
        break;

      /* A sanity check to make sure we're not merging insns with different
         effects on EH.  If only one of them ends a basic block, it shouldn't
         have an EH edge; if both end a basic block, there should be the same
         number of EH edges.  */
      if ((i1 == BB_END (bb1) && i2 != BB_END (bb2)
           && nehedges1 > 0)
          || (i2 == BB_END (bb2) && i1 != BB_END (bb1)
              && nehedges2 > 0)
          || (i1 == BB_END (bb1) && i2 == BB_END (bb2)
              && nehedges1 != nehedges2))
        break;

      if (old_insns_match_p (0, i1, i2) != dir_both)
        break;

      merge_memattrs (i1, i2);

      /* Don't begin a cross-jump with a NOTE insn.  */
      if (INSN_P (i1))
        {
          merge_notes (i1, i2);

          beforelast1 = last1, beforelast2 = last2;
          last1 = i1, last2 = i2;
          if (!stop_after || active_insn_p (i1))
            ninsns++;
        }

      if (i1 == BB_END (bb1) || i2 == BB_END (bb2)
          || (stop_after > 0 && ninsns == stop_after))
        break;

      i1 = NEXT_INSN (i1);
      i2 = NEXT_INSN (i2);
    }

  /* Don't allow a compare to be shared by cross-jumping unless the insn
     after the compare is also shared.  */
  if (HAVE_cc0 && ninsns && reg_mentioned_p (cc0_rtx, last1)
      && sets_cc0_p (last1))
    last1 = beforelast1, last2 = beforelast2, ninsns--;

  if (ninsns)
    {
      *f1 = last1;
      *f2 = last2;
    }

  return ninsns;
}

/* Return true iff outgoing edges of BB1 and BB2 match, together with
   the branch instruction.  This means that if we commonize the control
   flow before the end of the basic block, the semantics remain unchanged.

   We may assume that there exists one edge with a common destination.  */
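
/* E.g. two blocks each ending in "if (x == 0) goto L" whose taken and
   fallthru destinations pair up (possibly through forwarder blocks, and
   possibly with the condition reversed) match, provided the branch
   probabilities do not differ too much.  */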
1595
1596static bool
c2fc5456 1597outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
402209ff 1598{
0dd0e980
JH
1599 int nehedges1 = 0, nehedges2 = 0;
1600 edge fallthru1 = 0, fallthru2 = 0;
1601 edge e1, e2;
628f6a4e 1602 edge_iterator ei;
0dd0e980 1603
6626665f 1604 /* If we performed shrink-wrapping, edges to the exit block can
484db665
BS
1605 only be distinguished for JUMP_INSNs. The two paths may differ in
1606 whether they went through the prologue. Sibcalls are fine, we know
1607 that we either didn't need or inserted an epilogue before them. */
1608 if (crtl->shrink_wrapped
fefa31b5
DM
1609 && single_succ_p (bb1)
1610 && single_succ (bb1) == EXIT_BLOCK_PTR_FOR_FN (cfun)
b532a785
JJ
1611 && (!JUMP_P (BB_END (bb1))
1612 /* Punt if the only successor is a fake edge to exit, the jump
1613 must be some weird one. */
1614 || (single_succ_edge (bb1)->flags & EDGE_FAKE) != 0)
484db665
BS
1615 && !(CALL_P (BB_END (bb1)) && SIBLING_CALL_P (BB_END (bb1))))
1616 return false;
b532a785 1617
c04cf67b
RH
1618 /* If BB1 has only one successor, we may be looking at either an
1619 unconditional jump, or a fake edge to exit. */
c5cbcccf
ZD
1620 if (single_succ_p (bb1)
1621 && (single_succ_edge (bb1)->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
4b4bf941 1622 && (!JUMP_P (BB_END (bb1)) || simplejump_p (BB_END (bb1))))
c5cbcccf
ZD
1623 return (single_succ_p (bb2)
1624 && (single_succ_edge (bb2)->flags
1625 & (EDGE_COMPLEX | EDGE_FAKE)) == 0
4b4bf941 1626 && (!JUMP_P (BB_END (bb2)) || simplejump_p (BB_END (bb2))));
402209ff
JH
1627
1628 /* Match conditional jumps - this may get tricky when fallthru and branch
1629 edges are crossed. */
628f6a4e 1630 if (EDGE_COUNT (bb1->succs) == 2
a813c111
SB
1631 && any_condjump_p (BB_END (bb1))
1632 && onlyjump_p (BB_END (bb1)))
402209ff 1633 {
c2fc5456
R
1634 edge b1, f1, b2, f2;
1635 bool reverse, match;
1636 rtx set1, set2, cond1, cond2;
1637 enum rtx_code code1, code2;
1638
628f6a4e 1639 if (EDGE_COUNT (bb2->succs) != 2
a813c111
SB
1640 || !any_condjump_p (BB_END (bb2))
1641 || !onlyjump_p (BB_END (bb2)))
0a2ed1f1 1642 return false;
c2fc5456
R
1643
1644 b1 = BRANCH_EDGE (bb1);
1645 b2 = BRANCH_EDGE (bb2);
1646 f1 = FALLTHRU_EDGE (bb1);
1647 f2 = FALLTHRU_EDGE (bb2);
1648
1649 /* Get around possible forwarders on fallthru edges. Other cases
c22cacf3 1650 should be optimized out already. */
c2fc5456
R
1651 if (FORWARDER_BLOCK_P (f1->dest))
1652 f1 = single_succ_edge (f1->dest);
1653
1654 if (FORWARDER_BLOCK_P (f2->dest))
1655 f2 = single_succ_edge (f2->dest);
1656
1657 /* To simplify use of this function, return false if there are
1658 unneeded forwarder blocks. These will get eliminated later
1659 during cleanup_cfg. */
1660 if (FORWARDER_BLOCK_P (f1->dest)
1661 || FORWARDER_BLOCK_P (f2->dest)
1662 || FORWARDER_BLOCK_P (b1->dest)
1663 || FORWARDER_BLOCK_P (b2->dest))
1664 return false;
1665
1666 if (f1->dest == f2->dest && b1->dest == b2->dest)
1667 reverse = false;
1668 else if (f1->dest == b2->dest && b1->dest == f2->dest)
1669 reverse = true;
1670 else
1671 return false;
1672
1673 set1 = pc_set (BB_END (bb1));
1674 set2 = pc_set (BB_END (bb2));
1675 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
1676 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
1677 reverse = !reverse;
1678
1679 cond1 = XEXP (SET_SRC (set1), 0);
1680 cond2 = XEXP (SET_SRC (set2), 0);
1681 code1 = GET_CODE (cond1);
1682 if (reverse)
1683 code2 = reversed_comparison_code (cond2, BB_END (bb2));
1684 else
1685 code2 = GET_CODE (cond2);
1686
1687 if (code2 == UNKNOWN)
1688 return false;
1689
1690 /* Verify codes and operands match. */
1691 match = ((code1 == code2
1692 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
1693 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
1694 || (code1 == swap_condition (code2)
1695 && rtx_renumbered_equal_p (XEXP (cond1, 1),
1696 XEXP (cond2, 0))
1697 && rtx_renumbered_equal_p (XEXP (cond1, 0),
1698 XEXP (cond2, 1))));
1699
1700 /* If we return true, we will join the blocks. Which means that
1701 we will only have one branch prediction bit to work with. Thus
1702 we require the existing branches to have probabilities that are
1703 roughly similar. */
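 /* For intuition (an illustrative example, not part of the original
 source): merging a branch taken ~90% of the time with one taken
 ~10% of the time would leave the single surviving prediction badly
 wrong on one of the two paths; the differs_lot_from_p check below
 rejects such pairs. */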
1704 if (match
efd8f750
JH
1705 && optimize_bb_for_speed_p (bb1)
1706 && optimize_bb_for_speed_p (bb2))
c2fc5456 1707 {
357067f2 1708 profile_probability prob2;
c2fc5456
R
1709
1710 if (b1->dest == b2->dest)
1711 prob2 = b2->probability;
1712 else
1713 /* Do not use f2 probability as f2 may be forwarded. */
357067f2 1714 prob2 = b2->probability.invert ();
c2fc5456
R
1715
1716 /* Fail if the difference in probabilities is greater than 50%.
1717 This rules out two well-predicted branches with opposite
1718 outcomes. */
357067f2 1719 if (b1->probability.differs_lot_from_p (prob2))
c2fc5456
R
1720 {
1721 if (dump_file)
357067f2
JH
1722 {
1723 fprintf (dump_file,
1724 "Outcomes of branch in bb %i and %i differ too"
1725 " much (", bb1->index, bb2->index);
1726 b1->probability.dump (dump_file);
1727 prob2.dump (dump_file);
1728 fprintf (dump_file, ")\n");
1729 }
c2fc5456
R
1730 return false;
1731 }
1732 }
1733
1734 if (dump_file && match)
1735 fprintf (dump_file, "Conditionals in bb %i and %i match.\n",
1736 bb1->index, bb2->index);
1737
1738 return match;
402209ff
JH
1739 }
1740
09da1532 1741 /* Generic case - we are seeing a computed jump, table jump or trapping
0dd0e980
JH
1742 instruction. */
1743
39811184
JZ
1744 /* Check whether there are tablejumps in the end of BB1 and BB2.
1745 Return true if they are identical. */
1746 {
dfe08bc4 1747 rtx_insn *label1, *label2;
8942ee0f 1748 rtx_jump_table_data *table1, *table2;
39811184 1749
a813c111
SB
1750 if (tablejump_p (BB_END (bb1), &label1, &table1)
1751 && tablejump_p (BB_END (bb2), &label2, &table2)
39811184
JZ
1752 && GET_CODE (PATTERN (table1)) == GET_CODE (PATTERN (table2)))
1753 {
 1754 /* The labels should never be the same rtx. If they really are the same,
 1755 the jump tables are the same too. So disable crossjumping of blocks BB1
1756 and BB2 because when deleting the common insns in the end of BB1
6de9cd9a 1757 by delete_basic_block () the jump table would be deleted too. */
4af16369 1758 /* If LABEL2 is referenced in BB1->END do not do anything
39811184
JZ
 1759 because we would lose information when replacing
1760 LABEL1 by LABEL2 and then LABEL2 by LABEL1 in BB1->END. */
a813c111 1761 if (label1 != label2 && !rtx_referenced_p (label2, BB_END (bb1)))
39811184
JZ
1762 {
1763 /* Set IDENTICAL to true when the tables are identical. */
1764 bool identical = false;
1765 rtx p1, p2;
1766
1767 p1 = PATTERN (table1);
1768 p2 = PATTERN (table2);
1769 if (GET_CODE (p1) == ADDR_VEC && rtx_equal_p (p1, p2))
1770 {
1771 identical = true;
1772 }
1773 else if (GET_CODE (p1) == ADDR_DIFF_VEC
1774 && (XVECLEN (p1, 1) == XVECLEN (p2, 1))
1775 && rtx_equal_p (XEXP (p1, 2), XEXP (p2, 2))
1776 && rtx_equal_p (XEXP (p1, 3), XEXP (p2, 3)))
1777 {
1778 int i;
1779
1780 identical = true;
1781 for (i = XVECLEN (p1, 1) - 1; i >= 0 && identical; i--)
1782 if (!rtx_equal_p (XVECEXP (p1, 1, i), XVECEXP (p2, 1, i)))
1783 identical = false;
1784 }
1785
c2fc5456 1786 if (identical)
39811184 1787 {
39811184
JZ
1788 bool match;
1789
c2fc5456 1790 /* Temporarily replace references to LABEL1 with LABEL2
39811184 1791 in BB1->END so that we could compare the instructions. */
a2b7026c 1792 replace_label_in_insn (BB_END (bb1), label1, label2, false);
39811184 1793
472c95f5
TV
1794 match = (old_insns_match_p (mode, BB_END (bb1), BB_END (bb2))
1795 == dir_both);
c263766c
RH
1796 if (dump_file && match)
1797 fprintf (dump_file,
39811184
JZ
1798 "Tablejumps in bb %i and %i match.\n",
1799 bb1->index, bb2->index);
1800
c2fc5456
R
1801 /* Set the original label in BB1->END because when deleting
1802 a block whose end is a tablejump, the tablejump referenced
1803 from the instruction is deleted too. */
a2b7026c 1804 replace_label_in_insn (BB_END (bb1), label2, label1, false);
c2fc5456 1805
39811184
JZ
1806 return match;
1807 }
1808 }
1809 return false;
1810 }
1811 }
39811184 1812
d41d6122
TJ
1813 /* Find the last non-debug non-note instruction in each bb, except
1814 stop when we see the NOTE_INSN_BASIC_BLOCK, as old_insns_match_p
1815 handles that case specially. old_insns_match_p does not handle
1816 other types of instruction notes. */
da5477a9
DM
1817 rtx_insn *last1 = BB_END (bb1);
1818 rtx_insn *last2 = BB_END (bb2);
d41d6122
TJ
1819 while (!NOTE_INSN_BASIC_BLOCK_P (last1) &&
1820 (DEBUG_INSN_P (last1) || NOTE_P (last1)))
1821 last1 = PREV_INSN (last1);
1822 while (!NOTE_INSN_BASIC_BLOCK_P (last2) &&
1823 (DEBUG_INSN_P (last2) || NOTE_P (last2)))
1824 last2 = PREV_INSN (last2);
1825 gcc_assert (last1 && last2);
1826
0dd0e980 1827 /* First ensure that the instructions match. There may be many outgoing
39811184 1828 edges so this test is generally cheaper. */
206604dc 1829 if (old_insns_match_p (mode, last1, last2) != dir_both)
0dd0e980
JH
1830 return false;
1831
1832 /* Search the outgoing edges, ensure that the counts do match, find possible
 1833 fallthru and exception handling edges, since these need more
1834 validation. */
628f6a4e
BE
1835 if (EDGE_COUNT (bb1->succs) != EDGE_COUNT (bb2->succs))
1836 return false;
1837
206604dc 1838 bool nonfakeedges = false;
628f6a4e 1839 FOR_EACH_EDGE (e1, ei, bb1->succs)
0dd0e980 1840 {
628f6a4e 1841 e2 = EDGE_SUCC (bb2, ei.index);
c22cacf3 1842
206604dc
JJ
1843 if ((e1->flags & EDGE_FAKE) == 0)
1844 nonfakeedges = true;
1845
0dd0e980
JH
1846 if (e1->flags & EDGE_EH)
1847 nehedges1++;
5f0d2358 1848
0dd0e980
JH
1849 if (e2->flags & EDGE_EH)
1850 nehedges2++;
5f0d2358 1851
0dd0e980
JH
1852 if (e1->flags & EDGE_FALLTHRU)
1853 fallthru1 = e1;
1854 if (e2->flags & EDGE_FALLTHRU)
1855 fallthru2 = e2;
1856 }
5f0d2358 1857
0dd0e980 1858 /* If the number of edges of various types does not match, fail. */
628f6a4e 1859 if (nehedges1 != nehedges2
5f0d2358 1860 || (fallthru1 != 0) != (fallthru2 != 0))
0dd0e980
JH
1861 return false;
1862
206604dc
JJ
1863 /* If !ACCUMULATE_OUTGOING_ARGS, bb1 (and bb2) have no successors
1864 and the last real insn doesn't have REG_ARGS_SIZE note, don't
1865 attempt to optimize, as the two basic blocks might have different
1866 REG_ARGS_SIZE depths. For noreturn calls and unconditional
 1867 traps there should be REG_ARGS_SIZE notes; they can be missing
1868 for __builtin_unreachable () uses though. */
1869 if (!nonfakeedges
1870 && !ACCUMULATE_OUTGOING_ARGS
1871 && (!INSN_P (last1)
1872 || !find_reg_note (last1, REG_ARGS_SIZE, NULL)))
1873 return false;
1874
0dd0e980
JH
 1875 /* Fallthru edges must be forwarded to the same destination. */
1876 if (fallthru1)
1877 {
1878 basic_block d1 = (forwarder_block_p (fallthru1->dest)
c5cbcccf 1879 ? single_succ (fallthru1->dest): fallthru1->dest);
0dd0e980 1880 basic_block d2 = (forwarder_block_p (fallthru2->dest)
c5cbcccf 1881 ? single_succ (fallthru2->dest): fallthru2->dest);
5f0d2358 1882
0dd0e980
JH
1883 if (d1 != d2)
1884 return false;
1885 }
5f0d2358 1886
5f77fbd4
JJ
1887 /* Ensure the same EH region. */
1888 {
a813c111
SB
1889 rtx n1 = find_reg_note (BB_END (bb1), REG_EH_REGION, 0);
1890 rtx n2 = find_reg_note (BB_END (bb2), REG_EH_REGION, 0);
5f0d2358 1891
5f77fbd4
JJ
1892 if (!n1 && n2)
1893 return false;
1894
1895 if (n1 && (!n2 || XEXP (n1, 0) != XEXP (n2, 0)))
1896 return false;
1897 }
5f0d2358 1898
38109dab
GL
 1899 /* The same checks as in try_crossjump_to_edge. This is required for the RTL
1900 version of sequence abstraction. */
1901 FOR_EACH_EDGE (e1, ei, bb2->succs)
1902 {
1903 edge e2;
1904 edge_iterator ei;
1905 basic_block d1 = e1->dest;
1906
1907 if (FORWARDER_BLOCK_P (d1))
1908 d1 = EDGE_SUCC (d1, 0)->dest;
1909
1910 FOR_EACH_EDGE (e2, ei, bb1->succs)
1911 {
1912 basic_block d2 = e2->dest;
1913 if (FORWARDER_BLOCK_P (d2))
1914 d2 = EDGE_SUCC (d2, 0)->dest;
1915 if (d1 == d2)
1916 break;
1917 }
1918
1919 if (!e2)
1920 return false;
1921 }
1922
0dd0e980 1923 return true;
402209ff
JH
1924}
1925
38109dab
GL
 1926/* Returns true if basic block BB has a preserve label. */
1927
1928static bool
1929block_has_preserve_label (basic_block bb)
1930{
1931 return (bb
1932 && block_label (bb)
1933 && LABEL_PRESERVE_P (block_label (bb)));
1934}
1935
402209ff
JH
1936/* E1 and E2 are edges with the same destination block. Search their
1937 predecessors for common code. If found, redirect control flow from
bf22920b
TV
1938 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC (dir_forward),
1939 or the other way around (dir_backward). DIR specifies the allowed
1940 replacement direction. */
402209ff
JH
1941
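/* A minimal sketch of the transformation, not part of the original
 source:

 src1: x = a + b; src2: x = a + b;
 y = x * 2; y = x * 2;
 goto dest; goto dest;

 becomes

 src1: goto mid; mid: x = a + b;
 y = x * 2;
 goto dest;

 i.e. SRC2 is split before the common tail and SRC1 is redirected into
 the block that now holds the shared instructions. */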
1942static bool
bf22920b
TV
1943try_crossjump_to_edge (int mode, edge e1, edge e2,
1944 enum replace_direction dir)
402209ff 1945{
c2fc5456 1946 int nmatch;
402209ff 1947 basic_block src1 = e1->src, src2 = e2->src;
39587bb9 1948 basic_block redirect_to, redirect_from, to_remove;
823918ae 1949 basic_block osrc1, osrc2, redirect_edges_to, tmp;
da5477a9 1950 rtx_insn *newpos1, *newpos2;
402209ff 1951 edge s;
628f6a4e 1952 edge_iterator ei;
c2fc5456 1953
da5477a9 1954 newpos1 = newpos2 = NULL;
6de9cd9a 1955
402209ff
JH
1956 /* Search backward through forwarder blocks. We don't need to worry
1957 about multiple entry or chained forwarders, as they will be optimized
1958 away. We do this to look past the unconditional jump following a
1959 conditional jump that is required due to the current CFG shape. */
c5cbcccf 1960 if (single_pred_p (src1)
635559ab 1961 && FORWARDER_BLOCK_P (src1))
c5cbcccf 1962 e1 = single_pred_edge (src1), src1 = e1->src;
5f0d2358 1963
c5cbcccf 1964 if (single_pred_p (src2)
635559ab 1965 && FORWARDER_BLOCK_P (src2))
c5cbcccf 1966 e2 = single_pred_edge (src2), src2 = e2->src;
402209ff
JH
1967
1968 /* Nothing to do if we reach ENTRY, or a common source block. */
fefa31b5
DM
1969 if (src1 == ENTRY_BLOCK_PTR_FOR_FN (cfun) || src2
1970 == ENTRY_BLOCK_PTR_FOR_FN (cfun))
402209ff
JH
1971 return false;
1972 if (src1 == src2)
1973 return false;
1974
 1975 /* Seeing more than one forwarder block would confuse us later... */
635559ab 1976 if (FORWARDER_BLOCK_P (e1->dest)
c5cbcccf 1977 && FORWARDER_BLOCK_P (single_succ (e1->dest)))
402209ff 1978 return false;
5f0d2358 1979
635559ab 1980 if (FORWARDER_BLOCK_P (e2->dest)
c5cbcccf 1981 && FORWARDER_BLOCK_P (single_succ (e2->dest)))
402209ff
JH
1982 return false;
1983
1984 /* Likewise with dead code (possibly newly created by the other optimizations
1985 of cfg_cleanup). */
628f6a4e 1986 if (EDGE_COUNT (src1->preds) == 0 || EDGE_COUNT (src2->preds) == 0)
402209ff
JH
1987 return false;
1988
ba61fc53
JH
 1989 /* Do not turn a crossing edge into a non-crossing one or vice versa after reload. */
1990 if (BB_PARTITION (src1) != BB_PARTITION (src2)
1991 && reload_completed)
1992 return false;
1993
402209ff 1994 /* Look for the common insn sequence, part the first ... */
c2fc5456 1995 if (!outgoing_edges_match (mode, src1, src2))
402209ff
JH
1996 return false;
1997
1998 /* ... and part the second. */
472c95f5 1999 nmatch = flow_find_cross_jump (src1, src2, &newpos1, &newpos2, &dir);
12183e0f 2000
823918ae
TV
2001 osrc1 = src1;
2002 osrc2 = src2;
2003 if (newpos1 != NULL_RTX)
2004 src1 = BLOCK_FOR_INSN (newpos1);
2005 if (newpos2 != NULL_RTX)
2006 src2 = BLOCK_FOR_INSN (newpos2);
2007
dd68669b
JL
2008 /* Check that SRC1 and SRC2 have preds again. They may have changed
2009 above due to the call to flow_find_cross_jump. */
2010 if (EDGE_COUNT (src1->preds) == 0 || EDGE_COUNT (src2->preds) == 0)
2011 return false;
2012
bf22920b
TV
2013 if (dir == dir_backward)
2014 {
ba61fc53
JH
2015 std::swap (osrc1, osrc2);
2016 std::swap (src1, src2);
2017 std::swap (e1, e2);
2018 std::swap (newpos1, newpos2);
bf22920b
TV
2019 }
2020
12183e0f
PH
2021 /* Don't proceed with the crossjump unless we found a sufficient number
2022 of matching instructions or the 'from' block was totally matched
2023 (such that its predecessors will hopefully be redirected and the
2024 block removed). */
c2fc5456
R
2025 if ((nmatch < PARAM_VALUE (PARAM_MIN_CROSSJUMP_INSNS))
2026 && (newpos1 != BB_HEAD (src1)))
7d22e898 2027 return false;
402209ff 2028
75c40d56 2029 /* Avoid deleting preserve label when redirecting ABNORMAL edges. */
38109dab
GL
2030 if (block_has_preserve_label (e1->dest)
2031 && (e1->flags & EDGE_ABNORMAL))
2032 return false;
2033
39811184
JZ
2034 /* Here we know that the insns in the end of SRC1 which are common with SRC2
2035 will be deleted.
2036 If we have tablejumps in the end of SRC1 and SRC2
 2037 they have already been compared for equivalence in outgoing_edges_match ()
2038 so replace the references to TABLE1 by references to TABLE2. */
21c0a521 2039 {
dfe08bc4 2040 rtx_insn *label1, *label2;
8942ee0f 2041 rtx_jump_table_data *table1, *table2;
39811184 2042
823918ae
TV
2043 if (tablejump_p (BB_END (osrc1), &label1, &table1)
2044 && tablejump_p (BB_END (osrc2), &label2, &table2)
39811184
JZ
2045 && label1 != label2)
2046 {
da5477a9 2047 rtx_insn *insn;
39811184
JZ
2048
2049 /* Replace references to LABEL1 with LABEL2. */
39811184
JZ
2050 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2051 {
2052 /* Do not replace the label in SRC1->END because when deleting
2053 a block whose end is a tablejump, the tablejump referenced
2054 from the instruction is deleted too. */
823918ae 2055 if (insn != BB_END (osrc1))
a2b7026c 2056 replace_label_in_insn (insn, label1, label2, true);
39811184
JZ
2057 }
2058 }
21c0a521 2059 }
10d6c0d0 2060
b604fe9b
SB
2061 /* Avoid splitting if possible. We must always split when SRC2 has
2062 EH predecessor edges, or we may end up with basic blocks with both
2063 normal and EH predecessor edges. */
c2fc5456 2064 if (newpos2 == BB_HEAD (src2)
b604fe9b 2065 && !(EDGE_PRED (src2, 0)->flags & EDGE_EH))
402209ff
JH
2066 redirect_to = src2;
2067 else
2068 {
c2fc5456 2069 if (newpos2 == BB_HEAD (src2))
b604fe9b
SB
2070 {
2071 /* Skip possible basic block header. */
c2fc5456
R
2072 if (LABEL_P (newpos2))
2073 newpos2 = NEXT_INSN (newpos2);
b5b8b0ac
AO
2074 while (DEBUG_INSN_P (newpos2))
2075 newpos2 = NEXT_INSN (newpos2);
c2fc5456
R
2076 if (NOTE_P (newpos2))
2077 newpos2 = NEXT_INSN (newpos2);
b5b8b0ac
AO
2078 while (DEBUG_INSN_P (newpos2))
2079 newpos2 = NEXT_INSN (newpos2);
b604fe9b
SB
2080 }
2081
c263766c
RH
2082 if (dump_file)
2083 fprintf (dump_file, "Splitting bb %i before %i insns\n",
0b17ab2f 2084 src2->index, nmatch);
c2fc5456 2085 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
402209ff
JH
2086 }
2087
c263766c 2088 if (dump_file)
c2fc5456
R
2089 fprintf (dump_file,
2090 "Cross jumping from bb %i to bb %i; %i common insns\n",
2091 src1->index, src2->index, nmatch);
402209ff 2092
6fc0bb99 2093 /* We may have some registers visible through the block. */
6fb5fa3c 2094 df_set_bb_dirty (redirect_to);
402209ff 2095
823918ae
TV
2096 if (osrc2 == src2)
2097 redirect_edges_to = redirect_to;
2098 else
2099 redirect_edges_to = osrc2;
2100
e7a74006 2101 /* Recompute the counts of destinations of outgoing edges. */
823918ae 2102 FOR_EACH_EDGE (s, ei, redirect_edges_to->succs)
402209ff
JH
2103 {
2104 edge s2;
628f6a4e 2105 edge_iterator ei;
402209ff
JH
2106 basic_block d = s->dest;
2107
635559ab 2108 if (FORWARDER_BLOCK_P (d))
c5cbcccf 2109 d = single_succ (d);
5f0d2358 2110
628f6a4e 2111 FOR_EACH_EDGE (s2, ei, src1->succs)
402209ff
JH
2112 {
2113 basic_block d2 = s2->dest;
635559ab 2114 if (FORWARDER_BLOCK_P (d2))
c5cbcccf 2115 d2 = single_succ (d2);
402209ff
JH
2116 if (d == d2)
2117 break;
2118 }
5f0d2358 2119
402209ff 2120 /* Take care to update possible forwarder blocks. We verified
c22cacf3
MS
2121 that there is no more than one in the chain, so we can't run
 2122 into an infinite loop. */
635559ab 2123 if (FORWARDER_BLOCK_P (s->dest))
e7a74006 2124 s->dest->count += s->count ();
5f0d2358 2125
635559ab 2126 if (FORWARDER_BLOCK_P (s2->dest))
e7a74006 2127 s2->dest->count -= s->count ();
5f0d2358 2128
97c07987
JH
2129 s->probability = s->probability.combine_with_count
2130 (redirect_edges_to->count,
2131 s2->probability, src1->count);
402209ff
JH
2132 }
2133
e7a74006 2134 /* Adjust count for the block. An earlier jump
52982a97
EB
2135 threading pass may have left the profile in an inconsistent
2136 state (see update_bb_profile_for_threading) so we must be
2137 prepared for overflows. */
823918ae
TV
2138 tmp = redirect_to;
2139 do
2140 {
2141 tmp->count += src1->count;
823918ae
TV
2142 if (tmp == redirect_edges_to)
2143 break;
2144 tmp = find_fallthru_edge (tmp->succs)->dest;
2145 }
2146 while (true);
2147 update_br_prob_note (redirect_edges_to);
402209ff
JH
2148
2149 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
2150
c2fc5456
R
2151 /* Skip possible basic block header. */
2152 if (LABEL_P (newpos1))
2153 newpos1 = NEXT_INSN (newpos1);
b5b8b0ac
AO
2154
2155 while (DEBUG_INSN_P (newpos1))
2156 newpos1 = NEXT_INSN (newpos1);
2157
cd9c1ca8 2158 if (NOTE_INSN_BASIC_BLOCK_P (newpos1))
c2fc5456
R
2159 newpos1 = NEXT_INSN (newpos1);
2160
b5b8b0ac
AO
2161 while (DEBUG_INSN_P (newpos1))
2162 newpos1 = NEXT_INSN (newpos1);
2163
c2fc5456 2164 redirect_from = split_block (src1, PREV_INSN (newpos1))->src;
c5cbcccf 2165 to_remove = single_succ (redirect_from);
402209ff 2166
c5cbcccf 2167 redirect_edge_and_branch_force (single_succ_edge (redirect_from), redirect_to);
f470c378 2168 delete_basic_block (to_remove);
402209ff 2169
39587bb9 2170 update_forwarder_flag (redirect_from);
7cbd12b8
JJ
2171 if (redirect_to != src2)
2172 update_forwarder_flag (src2);
635559ab 2173
402209ff
JH
2174 return true;
2175}
2176
2177/* Search the predecessors of BB for common insn sequences. When found,
2178 share code between them by redirecting control flow. Return true if
 2179 any changes were made. */
2180
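/* An illustrative candidate shape, not part of the original source:

 pred1: ... t = f (x); goto bb;
 pred2: ... t = f (x); goto bb;

 Each pair of incoming edges is handed to try_crossjump_to_edge, trying
 the fallthru predecessor first since redirecting into it adds no new
 branch. */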
2181static bool
d329e058 2182try_crossjump_bb (int mode, basic_block bb)
402209ff 2183{
628f6a4e 2184 edge e, e2, fallthru;
402209ff 2185 bool changed;
628f6a4e 2186 unsigned max, ix, ix2;
402209ff 2187
f63d1bf7 2188 /* Nothing to do if there are not at least two incoming edges. */
628f6a4e 2189 if (EDGE_COUNT (bb->preds) < 2)
402209ff
JH
2190 return false;
2191
bbcb0c05
SB
2192 /* Don't crossjump if this block ends in a computed jump,
2193 unless we are optimizing for size. */
efd8f750 2194 if (optimize_bb_for_size_p (bb)
fefa31b5 2195 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
bbcb0c05
SB
2196 && computed_jump_p (BB_END (bb)))
2197 return false;
2198
750054a2
CT
2199 /* If we are partitioning hot/cold basic blocks, we don't want to
2200 mess up unconditional or indirect jumps that cross between hot
c22cacf3
MS
2201 and cold sections.
2202
8e8d5162 2203 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
2204 be optimizable (or blocks that appear to be mergeable), but which really
2205 must be left untouched (they are required to make it safely across
2206 partition boundaries). See the comments at the top of
8e8d5162
CT
2207 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2208
c22cacf3
MS
2209 if (BB_PARTITION (EDGE_PRED (bb, 0)->src) !=
2210 BB_PARTITION (EDGE_PRED (bb, 1)->src)
87c8b4be 2211 || (EDGE_PRED (bb, 0)->flags & EDGE_CROSSING))
750054a2
CT
2212 return false;
2213
402209ff
JH
2214 /* It is always cheapest to redirect a block that ends in a branch to
2215 a block that falls through into BB, as that adds no branches to the
2216 program. We'll try that combination first. */
5f24e0dc
RH
2217 fallthru = NULL;
2218 max = PARAM_VALUE (PARAM_MAX_CROSSJUMP_EDGES);
628f6a4e
BE
2219
2220 if (EDGE_COUNT (bb->preds) > max)
2221 return false;
2222
0fd4b31d 2223 fallthru = find_fallthru_edge (bb->preds);
402209ff
JH
2224
2225 changed = false;
0248bceb 2226 for (ix = 0; ix < EDGE_COUNT (bb->preds);)
402209ff 2227 {
0248bceb 2228 e = EDGE_PRED (bb, ix);
628f6a4e 2229 ix++;
402209ff 2230
c1e3e2d9
SB
 2231 /* As noted above, first try with the fallthru predecessor (or, in
 2232 cfglayout mode, a fallthru predecessor). */
402209ff
JH
2233 if (fallthru)
2234 {
2235 /* Don't combine the fallthru edge into anything else.
2236 If there is a match, we'll do it the other way around. */
2237 if (e == fallthru)
2238 continue;
7cf240d5
JH
2239 /* If nothing changed since the last attempt, there is nothing
2240 we can do. */
2241 if (!first_pass
4ec5d4f5
BS
2242 && !((e->src->flags & BB_MODIFIED)
2243 || (fallthru->src->flags & BB_MODIFIED)))
7cf240d5 2244 continue;
402209ff 2245
bf22920b 2246 if (try_crossjump_to_edge (mode, e, fallthru, dir_forward))
402209ff
JH
2247 {
2248 changed = true;
628f6a4e 2249 ix = 0;
402209ff
JH
2250 continue;
2251 }
2252 }
2253
2254 /* Non-obvious work limiting check: Recognize that we're going
2255 to call try_crossjump_bb on every basic block. So if we have
2256 two blocks with lots of outgoing edges (a switch) and they
2257 share lots of common destinations, then we would do the
2258 cross-jump check once for each common destination.
2259
2260 Now, if the blocks actually are cross-jump candidates, then
2261 all of their destinations will be shared. Which means that
2262 we only need check them for cross-jump candidacy once. We
2263 can eliminate redundant checks of crossjump(A,B) by arbitrarily
2264 choosing to do the check from the block for which the edge
2265 in question is the first successor of A. */
628f6a4e 2266 if (EDGE_SUCC (e->src, 0) != e)
402209ff
JH
2267 continue;
2268
0248bceb 2269 for (ix2 = 0; ix2 < EDGE_COUNT (bb->preds); ix2++)
402209ff 2270 {
0248bceb 2271 e2 = EDGE_PRED (bb, ix2);
402209ff
JH
2272
2273 if (e2 == e)
2274 continue;
2275
2276 /* We've already checked the fallthru edge above. */
2277 if (e2 == fallthru)
2278 continue;
2279
402209ff
JH
2280 /* The "first successor" check above only prevents multiple
2281 checks of crossjump(A,B). In order to prevent redundant
2282 checks of crossjump(B,A), require that A be the block
2283 with the lowest index. */
0b17ab2f 2284 if (e->src->index > e2->src->index)
402209ff
JH
2285 continue;
2286
7cf240d5
JH
2287 /* If nothing changed since the last attempt, there is nothing
2288 we can do. */
2289 if (!first_pass
4ec5d4f5
BS
2290 && !((e->src->flags & BB_MODIFIED)
2291 || (e2->src->flags & BB_MODIFIED)))
7cf240d5
JH
2292 continue;
2293
bf22920b
TV
2294 /* Both e and e2 are not fallthru edges, so we can crossjump in either
2295 direction. */
2296 if (try_crossjump_to_edge (mode, e, e2, dir_both))
402209ff
JH
2297 {
2298 changed = true;
628f6a4e 2299 ix = 0;
402209ff
JH
2300 break;
2301 }
2302 }
2303 }
2304
c1e3e2d9 2305 if (changed)
bd2c6270 2306 crossjumps_occurred = true;
c1e3e2d9 2307
402209ff
JH
2308 return changed;
2309}
2310
4ec5d4f5
BS
2311/* Search the successors of BB for common insn sequences. When found,
2312 share code between them by moving it across the basic block
 2313 boundary. Return true if any changes were made. */
2314
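/* A sketch of head merging, not part of the original source: when every
 successor of BB starts with the same insns,

 bb: if (c) goto A; else goto B;
 A: x = e; ...
 B: x = e; ...

 the common head "x = e" can be hoisted into BB ahead of the jump,
 provided can_move_insns_across and the computed liveness allow it:

 bb: x = e; if (c) goto A; else goto B; */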
2315static bool
2316try_head_merge_bb (basic_block bb)
2317{
2318 basic_block final_dest_bb = NULL;
2319 int max_match = INT_MAX;
2320 edge e0;
da5477a9 2321 rtx_insn **headptr, **currptr, **nextptr;
4ec5d4f5
BS
2322 bool changed, moveall;
2323 unsigned ix;
da5477a9 2324 rtx_insn *e0_last_head;
61aa0978
DM
2325 rtx cond;
2326 rtx_insn *move_before;
4ec5d4f5 2327 unsigned nedges = EDGE_COUNT (bb->succs);
da5477a9 2328 rtx_insn *jump = BB_END (bb);
4ec5d4f5
BS
2329 regset live, live_union;
2330
 2331 /* Nothing to do if there are not at least two outgoing edges. */
2332 if (nedges < 2)
2333 return false;
2334
2335 /* Don't crossjump if this block ends in a computed jump,
2336 unless we are optimizing for size. */
2337 if (optimize_bb_for_size_p (bb)
fefa31b5 2338 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
4ec5d4f5
BS
2339 && computed_jump_p (BB_END (bb)))
2340 return false;
2341
2342 cond = get_condition (jump, &move_before, true, false);
2343 if (cond == NULL_RTX)
43052d45 2344 {
618f4073 2345 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
43052d45
BS
2346 move_before = prev_nonnote_nondebug_insn (jump);
2347 else
43052d45
BS
2348 move_before = jump;
2349 }
4ec5d4f5
BS
2350
2351 for (ix = 0; ix < nedges; ix++)
fefa31b5 2352 if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4ec5d4f5
BS
2353 return false;
2354
2355 for (ix = 0; ix < nedges; ix++)
2356 {
2357 edge e = EDGE_SUCC (bb, ix);
2358 basic_block other_bb = e->dest;
2359
2360 if (df_get_bb_dirty (other_bb))
2361 {
2362 block_was_dirty = true;
2363 return false;
2364 }
2365
2366 if (e->flags & EDGE_ABNORMAL)
2367 return false;
2368
2369 /* Normally, all destination blocks must only be reachable from this
2370 block, i.e. they must have one incoming edge.
2371
2372 There is one special case we can handle, that of multiple consecutive
2373 jumps where the first jumps to one of the targets of the second jump.
2374 This happens frequently in switch statements for default labels.
2375 The structure is as follows:
2376 FINAL_DEST_BB
2377 ....
2378 if (cond) jump A;
2379 fall through
2380 BB
2381 jump with targets A, B, C, D...
2382 A
2383 has two incoming edges, from FINAL_DEST_BB and BB
2384
2385 In this case, we can try to move the insns through BB and into
2386 FINAL_DEST_BB. */
2387 if (EDGE_COUNT (other_bb->preds) != 1)
2388 {
2389 edge incoming_edge, incoming_bb_other_edge;
2390 edge_iterator ei;
2391
2392 if (final_dest_bb != NULL
2393 || EDGE_COUNT (other_bb->preds) != 2)
2394 return false;
2395
2396 /* We must be able to move the insns across the whole block. */
2397 move_before = BB_HEAD (bb);
2398 while (!NONDEBUG_INSN_P (move_before))
2399 move_before = NEXT_INSN (move_before);
2400
2401 if (EDGE_COUNT (bb->preds) != 1)
2402 return false;
2403 incoming_edge = EDGE_PRED (bb, 0);
2404 final_dest_bb = incoming_edge->src;
2405 if (EDGE_COUNT (final_dest_bb->succs) != 2)
2406 return false;
2407 FOR_EACH_EDGE (incoming_bb_other_edge, ei, final_dest_bb->succs)
2408 if (incoming_bb_other_edge != incoming_edge)
2409 break;
2410 if (incoming_bb_other_edge->dest != other_bb)
2411 return false;
2412 }
2413 }
2414
2415 e0 = EDGE_SUCC (bb, 0);
da5477a9 2416 e0_last_head = NULL;
4ec5d4f5
BS
2417 changed = false;
2418
2419 for (ix = 1; ix < nedges; ix++)
2420 {
2421 edge e = EDGE_SUCC (bb, ix);
da5477a9 2422 rtx_insn *e0_last, *e_last;
4ec5d4f5
BS
2423 int nmatch;
2424
2425 nmatch = flow_find_head_matching_sequence (e0->dest, e->dest,
2426 &e0_last, &e_last, 0);
2427 if (nmatch == 0)
2428 return false;
2429
2430 if (nmatch < max_match)
2431 {
2432 max_match = nmatch;
2433 e0_last_head = e0_last;
2434 }
2435 }
2436
2437 /* If we matched an entire block, we probably have to avoid moving the
2438 last insn. */
2439 if (max_match > 0
2440 && e0_last_head == BB_END (e0->dest)
2441 && (find_reg_note (e0_last_head, REG_EH_REGION, 0)
2442 || control_flow_insn_p (e0_last_head)))
2443 {
2444 max_match--;
2445 if (max_match == 0)
2446 return false;
ec2d7121 2447 e0_last_head = prev_real_nondebug_insn (e0_last_head);
4ec5d4f5
BS
2448 }
2449
2450 if (max_match == 0)
2451 return false;
2452
2453 /* We must find a union of the live registers at each of the end points. */
2454 live = BITMAP_ALLOC (NULL);
2455 live_union = BITMAP_ALLOC (NULL);
2456
da5477a9
DM
2457 currptr = XNEWVEC (rtx_insn *, nedges);
2458 headptr = XNEWVEC (rtx_insn *, nedges);
2459 nextptr = XNEWVEC (rtx_insn *, nedges);
4ec5d4f5
BS
2460
2461 for (ix = 0; ix < nedges; ix++)
2462 {
2463 int j;
2464 basic_block merge_bb = EDGE_SUCC (bb, ix)->dest;
da5477a9 2465 rtx_insn *head = BB_HEAD (merge_bb);
4ec5d4f5 2466
b59e0455
JJ
2467 while (!NONDEBUG_INSN_P (head))
2468 head = NEXT_INSN (head);
4ec5d4f5
BS
2469 headptr[ix] = head;
2470 currptr[ix] = head;
2471
2472 /* Compute the end point and live information */
2473 for (j = 1; j < max_match; j++)
b59e0455
JJ
2474 do
2475 head = NEXT_INSN (head);
2476 while (!NONDEBUG_INSN_P (head));
4ec5d4f5
BS
2477 simulate_backwards_to_point (merge_bb, live, head);
2478 IOR_REG_SET (live_union, live);
2479 }
2480
2481 /* If we're moving across two blocks, verify the validity of the
2482 first move, then adjust the target and let the loop below deal
2483 with the final move. */
2484 if (final_dest_bb != NULL)
2485 {
61aa0978 2486 rtx_insn *move_upto;
4ec5d4f5
BS
2487
2488 moveall = can_move_insns_across (currptr[0], e0_last_head, move_before,
2489 jump, e0->dest, live_union,
2490 NULL, &move_upto);
2491 if (!moveall)
2492 {
2493 if (move_upto == NULL_RTX)
2494 goto out;
2495
2496 while (e0_last_head != move_upto)
2497 {
2498 df_simulate_one_insn_backwards (e0->dest, e0_last_head,
2499 live_union);
2500 e0_last_head = PREV_INSN (e0_last_head);
2501 }
2502 }
2503 if (e0_last_head == NULL_RTX)
2504 goto out;
2505
2506 jump = BB_END (final_dest_bb);
2507 cond = get_condition (jump, &move_before, true, false);
2508 if (cond == NULL_RTX)
43052d45 2509 {
618f4073 2510 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
43052d45
BS
2511 move_before = prev_nonnote_nondebug_insn (jump);
2512 else
43052d45
BS
2513 move_before = jump;
2514 }
4ec5d4f5
BS
2515 }
2516
2517 do
2518 {
61aa0978 2519 rtx_insn *move_upto;
4ec5d4f5
BS
2520 moveall = can_move_insns_across (currptr[0], e0_last_head,
2521 move_before, jump, e0->dest, live_union,
2522 NULL, &move_upto);
2523 if (!moveall && move_upto == NULL_RTX)
2524 {
2525 if (jump == move_before)
2526 break;
2527
2528 /* Try again, using a different insertion point. */
2529 move_before = jump;
2530
4ec5d4f5
BS
2531 /* Don't try moving before a cc0 user, as that may invalidate
2532 the cc0. */
618f4073 2533 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
4ec5d4f5 2534 break;
4ec5d4f5
BS
2535
2536 continue;
2537 }
2538
2539 if (final_dest_bb && !moveall)
2540 /* We haven't checked whether a partial move would be OK for the first
2541 move, so we have to fail this case. */
2542 break;
2543
2544 changed = true;
2545 for (;;)
2546 {
2547 if (currptr[0] == move_upto)
2548 break;
2549 for (ix = 0; ix < nedges; ix++)
2550 {
da5477a9 2551 rtx_insn *curr = currptr[ix];
4ec5d4f5
BS
2552 do
2553 curr = NEXT_INSN (curr);
2554 while (!NONDEBUG_INSN_P (curr));
2555 currptr[ix] = curr;
2556 }
2557 }
2558
2559 /* If we can't currently move all of the identical insns, remember
2560 each insn after the range that we'll merge. */
2561 if (!moveall)
2562 for (ix = 0; ix < nedges; ix++)
2563 {
da5477a9 2564 rtx_insn *curr = currptr[ix];
4ec5d4f5
BS
2565 do
2566 curr = NEXT_INSN (curr);
2567 while (!NONDEBUG_INSN_P (curr));
2568 nextptr[ix] = curr;
2569 }
2570
2571 reorder_insns (headptr[0], currptr[0], PREV_INSN (move_before));
2572 df_set_bb_dirty (EDGE_SUCC (bb, 0)->dest);
2573 if (final_dest_bb != NULL)
2574 df_set_bb_dirty (final_dest_bb);
2575 df_set_bb_dirty (bb);
2576 for (ix = 1; ix < nedges; ix++)
2577 {
2578 df_set_bb_dirty (EDGE_SUCC (bb, ix)->dest);
2579 delete_insn_chain (headptr[ix], currptr[ix], false);
2580 }
2581 if (!moveall)
2582 {
2583 if (jump == move_before)
2584 break;
2585
2586 /* For the unmerged insns, try a different insertion point. */
2587 move_before = jump;
2588
4ec5d4f5
BS
2589 /* Don't try moving before a cc0 user, as that may invalidate
2590 the cc0. */
618f4073 2591 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, jump))
4ec5d4f5 2592 break;
4ec5d4f5
BS
2593
2594 for (ix = 0; ix < nedges; ix++)
2595 currptr[ix] = headptr[ix] = nextptr[ix];
2596 }
2597 }
2598 while (!moveall);
2599
2600 out:
2601 free (currptr);
2602 free (headptr);
2603 free (nextptr);
2604
bd2c6270 2605 crossjumps_occurred |= changed;
4ec5d4f5
BS
2606
2607 return changed;
2608}
2609
7752e522
JJ
 2610/* Return true if BB contains just a bb note, or a bb note followed
2611 by only DEBUG_INSNs. */
2612
2613static bool
2614trivially_empty_bb_p (basic_block bb)
2615{
da5477a9 2616 rtx_insn *insn = BB_END (bb);
7752e522
JJ
2617
2618 while (1)
2619 {
2620 if (insn == BB_HEAD (bb))
2621 return true;
2622 if (!DEBUG_INSN_P (insn))
2623 return false;
2624 insn = PREV_INSN (insn);
2625 }
2626}
2627
45676a7c
SB
2628/* Return true if BB contains just a return and possibly a USE of the
2629 return value. Fill in *RET and *USE with the return and use insns
2ea0d750 2630 if any found, otherwise NULL. All CLOBBERs are ignored. */
45676a7c
SB
2631
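/* An illustrative RTL shape accepted here, not part of the original
 source:

 (use (reg:SI 0 ax)) ;; USE of the function value register
 (simple_return)

 possibly interleaved with CLOBBERs, notes and debug insns. */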
2632static bool
2633bb_is_just_return (basic_block bb, rtx_insn **ret, rtx_insn **use)
2634{
2635 *ret = *use = NULL;
2636 rtx_insn *insn;
2637
2638 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
2639 return false;
2640
2641 FOR_BB_INSNS (bb, insn)
2642 if (NONDEBUG_INSN_P (insn))
2643 {
2ea0d750
SB
2644 rtx pat = PATTERN (insn);
2645
2646 if (!*ret && ANY_RETURN_P (pat))
45676a7c 2647 *ret = insn;
2ea0d750
SB
2648 else if (!*ret && !*use && GET_CODE (pat) == USE
2649 && REG_P (XEXP (pat, 0))
2650 && REG_FUNCTION_VALUE_P (XEXP (pat, 0)))
45676a7c 2651 *use = insn;
2ea0d750 2652 else if (GET_CODE (pat) != CLOBBER)
45676a7c
SB
2653 return false;
2654 }
2655
2656 return !!*ret;
2657}
2658
402209ff
JH
2659/* Do simple CFG optimizations - basic block merging, simplifying of jump
2660 instructions etc. Return nonzero if changes were made. */
2661
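/* A rough map of the loop below, not part of the original source: for
 each block we attempt, in order, deletion of trivially dead blocks,
 removal of unused labels, merging with a single successor, turning
 (conditional) jumps to return blocks into (conditional) returns,
 branch-over-branch simplification, edge forwarding, crossjumping and
 head merging, iterating until no attempt succeeds. */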
2662static bool
d329e058 2663try_optimize_cfg (int mode)
402209ff 2664{
402209ff
JH
2665 bool changed_overall = false;
2666 bool changed;
2667 int iterations = 0;
ec3ae3da 2668 basic_block bb, b, next;
402209ff 2669
6fb5fa3c 2670 if (mode & (CLEANUP_CROSSJUMP | CLEANUP_THREADING))
38c1593d
JH
2671 clear_bb_flags ();
2672
bd2c6270 2673 crossjumps_occurred = false;
c1e3e2d9 2674
11cd3bed 2675 FOR_EACH_BB_FN (bb, cfun)
2dd2d53e
SB
2676 update_forwarder_flag (bb);
2677
245f1bfa 2678 if (! targetm.cannot_modify_jumps_p ())
402209ff 2679 {
7cf240d5 2680 first_pass = true;
e4ec2cac
AO
2681 /* Attempt to merge blocks as made possible by edge removal. If
2682 a block has only one successor, and the successor has only
2683 one predecessor, they may be combined. */
2684 do
402209ff 2685 {
4ec5d4f5 2686 block_was_dirty = false;
e4ec2cac
AO
2687 changed = false;
2688 iterations++;
2689
c263766c
RH
2690 if (dump_file)
2691 fprintf (dump_file,
e4ec2cac
AO
2692 "\n\ntry_optimize_cfg iteration %i\n\n",
2693 iterations);
402209ff 2694
fefa31b5
DM
2695 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
2696 != EXIT_BLOCK_PTR_FOR_FN (cfun);)
402209ff 2697 {
e0082a72 2698 basic_block c;
e4ec2cac
AO
2699 edge s;
2700 bool changed_here = false;
5f0d2358 2701
468059bc
DD
 2702 /* Delete trivially dead basic blocks. These are either
 2703 blocks with no predecessors, or empty blocks with no
1e211590
DD
 2704 successors. However, if the empty block with no
2705 successors is the successor of the ENTRY_BLOCK, it is
2706 kept. This ensures that the ENTRY_BLOCK will have a
2707 successor which is a precondition for many RTL
2708 passes. Empty blocks may result from expanding
468059bc
DD
2709 __builtin_unreachable (). */
2710 if (EDGE_COUNT (b->preds) == 0
1e211590 2711 || (EDGE_COUNT (b->succs) == 0
7752e522 2712 && trivially_empty_bb_p (b)
fefa31b5
DM
2713 && single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest
2714 != b))
e4ec2cac 2715 {
f6366fc7 2716 c = b->prev_bb;
f1de5107 2717 if (EDGE_COUNT (b->preds) > 0)
3b5fda81
JJ
2718 {
2719 edge e;
2720 edge_iterator ei;
2721
f1de5107
JJ
2722 if (current_ir_type () == IR_RTL_CFGLAYOUT)
2723 {
c758576a
JJ
2724 rtx_insn *insn;
2725 for (insn = BB_FOOTER (b);
2726 insn; insn = NEXT_INSN (insn))
2727 if (BARRIER_P (insn))
2728 break;
2729 if (insn)
f1de5107 2730 FOR_EACH_EDGE (e, ei, b->preds)
c758576a 2731 if ((e->flags & EDGE_FALLTHRU))
f1de5107 2732 {
c758576a
JJ
2733 if (BB_FOOTER (b)
2734 && BB_FOOTER (e->src) == NULL)
f1de5107 2735 {
d8ce2eae
DM
2736 BB_FOOTER (e->src) = BB_FOOTER (b);
2737 BB_FOOTER (b) = NULL;
f1de5107
JJ
2738 }
2739 else
c758576a 2740 emit_barrier_after_bb (e->src);
f1de5107
JJ
2741 }
2742 }
2743 else
2744 {
da5477a9 2745 rtx_insn *last = get_last_bb_insn (b);
f1de5107
JJ
2746 if (last && BARRIER_P (last))
2747 FOR_EACH_EDGE (e, ei, b->preds)
2748 if ((e->flags & EDGE_FALLTHRU))
2749 emit_barrier_after (BB_END (e->src));
2750 }
3b5fda81 2751 }
f470c378 2752 delete_basic_block (b);
bef16e87 2753 changed = true;
6626665f 2754 /* Avoid trying to remove the exit block. */
fefa31b5 2755 b = (c == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? c->next_bb : c);
83bd032b 2756 continue;
e4ec2cac 2757 }
402209ff 2758
6ce2bcb7 2759 /* Remove code labels no longer used. */
c5cbcccf
ZD
2760 if (single_pred_p (b)
2761 && (single_pred_edge (b)->flags & EDGE_FALLTHRU)
2762 && !(single_pred_edge (b)->flags & EDGE_COMPLEX)
4b4bf941 2763 && LABEL_P (BB_HEAD (b))
6c979aa1 2764 && !LABEL_PRESERVE_P (BB_HEAD (b))
e4ec2cac
AO
2765 /* If the previous block ends with a branch to this
2766 block, we can't delete the label. Normally this
2767 is a condjump that is yet to be simplified, but
2768 if CASE_DROPS_THRU, this can be a tablejump with
2769 some element going to the same place as the
2770 default (fallthru). */
fefa31b5 2771 && (single_pred (b) == ENTRY_BLOCK_PTR_FOR_FN (cfun)
c5cbcccf 2772 || !JUMP_P (BB_END (single_pred (b)))
a813c111 2773 || ! label_is_jump_target_p (BB_HEAD (b),
c5cbcccf 2774 BB_END (single_pred (b)))))
e4ec2cac 2775 {
03fbe718 2776 delete_insn (BB_HEAD (b));
c263766c
RH
2777 if (dump_file)
2778 fprintf (dump_file, "Deleted label in block %i.\n",
0b17ab2f 2779 b->index);
e4ec2cac 2780 }
402209ff 2781
e4ec2cac 2782 /* If we fall through an empty block, we can remove it. */
9be94227 2783 if (!(mode & (CLEANUP_CFGLAYOUT | CLEANUP_NO_INSN_DEL))
c5cbcccf
ZD
2784 && single_pred_p (b)
2785 && (single_pred_edge (b)->flags & EDGE_FALLTHRU)
4b4bf941 2786 && !LABEL_P (BB_HEAD (b))
e4ec2cac
AO
2787 && FORWARDER_BLOCK_P (b)
2788 /* Note that forwarder_block_p true ensures that
2789 there is a successor for this block. */
c5cbcccf 2790 && (single_succ_edge (b)->flags & EDGE_FALLTHRU)
0cae8d31 2791 && n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS + 1)
e4ec2cac 2792 {
c263766c
RH
2793 if (dump_file)
2794 fprintf (dump_file,
e4ec2cac 2795 "Deleting fallthru block %i.\n",
0b17ab2f 2796 b->index);
e4ec2cac 2797
fefa31b5
DM
2798 c = ((b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2799 ? b->next_bb : b->prev_bb);
c5cbcccf
ZD
2800 redirect_edge_succ_nodup (single_pred_edge (b),
2801 single_succ (b));
f470c378 2802 delete_basic_block (b);
e4ec2cac
AO
2803 changed = true;
2804 b = c;
1e211590 2805 continue;
e4ec2cac 2806 }
5f0d2358 2807
50a36e42 2808 /* Merge B with its single successor, if any. */
c5cbcccf
ZD
2809 if (single_succ_p (b)
2810 && (s = single_succ_edge (b))
ec3ae3da 2811 && !(s->flags & EDGE_COMPLEX)
fefa31b5 2812 && (c = s->dest) != EXIT_BLOCK_PTR_FOR_FN (cfun)
c5cbcccf 2813 && single_pred_p (c)
bc35512f
JH
2814 && b != c)
2815 {
2816 /* When not in cfg_layout mode use code aware of reordering
2817 INSN. This code possibly creates new basic blocks so it
 2818 does not fit the merge_blocks interface and is kept here in the
 2819 hope that it will become useless once more of the compiler
2820 is transformed to use cfg_layout mode. */
c22cacf3 2821
bc35512f
JH
2822 if ((mode & CLEANUP_CFGLAYOUT)
2823 && can_merge_blocks_p (b, c))
2824 {
2825 merge_blocks (b, c);
2826 update_forwarder_flag (b);
2827 changed_here = true;
2828 }
2829 else if (!(mode & CLEANUP_CFGLAYOUT)
2830 /* If the jump insn has side effects,
2831 we can't kill the edge. */
4b4bf941 2832 && (!JUMP_P (BB_END (b))
e24e7211 2833 || (reload_completed
a813c111 2834 ? simplejump_p (BB_END (b))
e4efa971
JH
2835 : (onlyjump_p (BB_END (b))
2836 && !tablejump_p (BB_END (b),
2837 NULL, NULL))))
bc35512f
JH
2838 && (next = merge_blocks_move (s, b, c, mode)))
2839 {
2840 b = next;
2841 changed_here = true;
2842 }
ec3ae3da 2843 }
e4ec2cac 2844
45676a7c
SB
2845 /* Try to change a branch to a return to just that return. */
2846 rtx_insn *ret, *use;
2847 if (single_succ_p (b)
2848 && onlyjump_p (BB_END (b))
2849 && bb_is_just_return (single_succ (b), &ret, &use))
2850 {
2851 if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
2852 PATTERN (ret), 0))
2853 {
2854 if (use)
2855 emit_insn_before (copy_insn (PATTERN (use)),
2856 BB_END (b));
2857 if (dump_file)
2858 fprintf (dump_file, "Changed jump %d->%d to return.\n",
2859 b->index, single_succ (b)->index);
2860 redirect_edge_succ (single_succ_edge (b),
2861 EXIT_BLOCK_PTR_FOR_FN (cfun));
2862 single_succ_edge (b)->flags &= ~EDGE_CROSSING;
2863 changed_here = true;
2864 }
2865 }
2866
2867 /* Try to change a conditional branch to a return to the
2868 respective conditional return. */
2869 if (EDGE_COUNT (b->succs) == 2
2870 && any_condjump_p (BB_END (b))
2871 && bb_is_just_return (BRANCH_EDGE (b)->dest, &ret, &use))
2872 {
2873 if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
2874 PATTERN (ret), 0))
2875 {
2876 if (use)
2877 emit_insn_before (copy_insn (PATTERN (use)),
2878 BB_END (b));
2879 if (dump_file)
2880 fprintf (dump_file, "Changed conditional jump %d->%d "
2881 "to conditional return.\n",
2882 b->index, BRANCH_EDGE (b)->dest->index);
2883 redirect_edge_succ (BRANCH_EDGE (b),
2884 EXIT_BLOCK_PTR_FOR_FN (cfun));
2885 BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
2886 changed_here = true;
2887 }
2888 }
2889
2890 /* Try to flip a conditional branch that falls through to
2891 a return so that it becomes a conditional return and a
2892 new jump to the original branch target. */
2893 if (EDGE_COUNT (b->succs) == 2
ac2a4c0d 2894 && BRANCH_EDGE (b)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
45676a7c
SB
2895 && any_condjump_p (BB_END (b))
2896 && bb_is_just_return (FALLTHRU_EDGE (b)->dest, &ret, &use))
2897 {
2898 if (invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
2899 JUMP_LABEL (BB_END (b)), 0))
2900 {
2901 basic_block new_ft = BRANCH_EDGE (b)->dest;
2902 if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
2903 PATTERN (ret), 0))
2904 {
2905 if (use)
2906 emit_insn_before (copy_insn (PATTERN (use)),
2907 BB_END (b));
2908 if (dump_file)
2909 fprintf (dump_file, "Changed conditional jump "
2910 "%d->%d to conditional return, adding "
2911 "fall-through jump.\n",
2912 b->index, BRANCH_EDGE (b)->dest->index);
2913 redirect_edge_succ (BRANCH_EDGE (b),
2914 EXIT_BLOCK_PTR_FOR_FN (cfun));
2915 BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
2916 std::swap (BRANCH_EDGE (b)->probability,
2917 FALLTHRU_EDGE (b)->probability);
2918 update_br_prob_note (b);
2919 basic_block jb = force_nonfallthru (FALLTHRU_EDGE (b));
2920 notice_new_block (jb);
2921 if (!redirect_jump (as_a <rtx_jump_insn *> (BB_END (jb)),
2922 block_label (new_ft), 0))
2923 gcc_unreachable ();
2924 redirect_edge_succ (single_succ_edge (jb), new_ft);
2925 changed_here = true;
2926 }
2927 else
2928 {
2929 /* Invert the jump back to what it was. This should
2930 never fail. */
2931 if (!invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
2932 JUMP_LABEL (BB_END (b)), 0))
2933 gcc_unreachable ();
2934 }
2935 }
2936 }
2937
e4ec2cac 2938 /* Simplify branch over branch. */
bc35512f
JH
2939 if ((mode & CLEANUP_EXPENSIVE)
2940 && !(mode & CLEANUP_CFGLAYOUT)
2941 && try_simplify_condjump (b))
38c1593d 2942 changed_here = true;
402209ff 2943
e4ec2cac
AO
2944 /* If B has a single outgoing edge, but uses a
2945 non-trivial jump instruction without side-effects, we
2946 can either delete the jump entirely, or replace it
3348b696 2947 with a simple unconditional jump. */
c5cbcccf 2948 if (single_succ_p (b)
fefa31b5 2949 && single_succ (b) != EXIT_BLOCK_PTR_FOR_FN (cfun)
a813c111 2950 && onlyjump_p (BB_END (b))
339ba33b 2951 && !CROSSING_JUMP_P (BB_END (b))
c5cbcccf
ZD
2952 && try_redirect_by_replacing_jump (single_succ_edge (b),
2953 single_succ (b),
20b4e8ae 2954 (mode & CLEANUP_CFGLAYOUT) != 0))
e4ec2cac 2955 {
e4ec2cac
AO
2956 update_forwarder_flag (b);
2957 changed_here = true;
2958 }
402209ff 2959
e4ec2cac
AO
2960 /* Simplify branch to branch. */
2961 if (try_forward_edges (mode, b))
afe8b6ec
EB
2962 {
2963 update_forwarder_flag (b);
2964 changed_here = true;
2965 }
402209ff 2966
e4ec2cac
AO
2967 /* Look for shared code between blocks. */
2968 if ((mode & CLEANUP_CROSSJUMP)
2969 && try_crossjump_bb (mode, b))
2970 changed_here = true;
402209ff 2971
4ec5d4f5
BS
2972 if ((mode & CLEANUP_CROSSJUMP)
2973 /* This can lengthen register lifetimes. Do it only after
2974 reload. */
2975 && reload_completed
2976 && try_head_merge_bb (b))
2977 changed_here = true;
2978
e4ec2cac
AO
2979 /* Don't get confused by the index shift caused by
2980 deleting blocks. */
2981 if (!changed_here)
e0082a72 2982 b = b->next_bb;
e4ec2cac
AO
2983 else
2984 changed = true;
2985 }
402209ff 2986
e4ec2cac 2987 if ((mode & CLEANUP_CROSSJUMP)
fefa31b5 2988 && try_crossjump_bb (mode, EXIT_BLOCK_PTR_FOR_FN (cfun)))
402209ff 2989 changed = true;
402209ff 2990
4ec5d4f5
BS
2991 if (block_was_dirty)
2992 {
2993 /* This should only be set by head-merging. */
2994 gcc_assert (mode & CLEANUP_CROSSJUMP);
2995 df_analyze ();
2996 }
2997
e4ec2cac 2998 if (changed)
600b5b1d
TJ
2999 {
3000 /* Edge forwarding in particular can cause hot blocks previously
3001 reached by both hot and cold blocks to become dominated only
3002 by cold blocks. This will cause the verification below to fail,
3003 and lead to now cold code in the hot section. This is not easy
3004 to detect and fix during edge forwarding, and in some cases
3005 is only visible after newly unreachable blocks are deleted,
3006 which will be done in fixup_partitions. */
3ff0dc17 3007 if ((mode & CLEANUP_NO_PARTITIONING) == 0)
3211aea2
JH
3008 {
3009 fixup_partitions ();
3010 checking_verify_flow_info ();
3011 }
600b5b1d 3012 }
402209ff 3013
e4ec2cac 3014 changed_overall |= changed;
7cf240d5 3015 first_pass = false;
e4ec2cac
AO
3016 }
3017 while (changed);
402209ff 3018 }
ca6c03ca 3019
04a90bec 3020 FOR_ALL_BB_FN (b, cfun)
2dd2d53e 3021 b->flags &= ~(BB_FORWARDER_BLOCK | BB_NONTHREADABLE_BLOCK);
635559ab 3022
402209ff
JH
3023 return changed_overall;
3024}
3025\f
6d2f8887 3026/* Delete all unreachable basic blocks. */
4262e623 3027
969d70ca 3028bool
d329e058 3029delete_unreachable_blocks (void)
402209ff 3030{
402209ff 3031 bool changed = false;
b5b8b0ac 3032 basic_block b, prev_bb;
402209ff
JH
3033
3034 find_unreachable_blocks ();
3035
65f4b875
AO
3036 /* When we're in GIMPLE mode and there may be debug bind insns, we
3037 should delete blocks in reverse dominator order, so as to get a
3038 chance to substitute all released DEFs into debug bind stmts. If
3039 we don't have dominators information, walking blocks backward
3040 gets us a better chance of retaining most debug information than
b5b8b0ac 3041 otherwise. */
65f4b875 3042 if (MAY_HAVE_DEBUG_BIND_INSNS && current_ir_type () == IR_GIMPLE
b5b8b0ac 3043 && dom_info_available_p (CDI_DOMINATORS))
402209ff 3044 {
fefa31b5
DM
3045 for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
3046 b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
b5b8b0ac
AO
3047 {
3048 prev_bb = b->prev_bb;
3049
3050 if (!(b->flags & BB_REACHABLE))
3051 {
3052 /* Speed up the removal of blocks that don't dominate
3053 others. Walking backwards, this should be the common
3054 case. */
3055 if (!first_dom_son (CDI_DOMINATORS, b))
3056 delete_basic_block (b);
3057 else
3058 {
9771b263 3059 vec<basic_block> h
b5b8b0ac
AO
3060 = get_all_dominated_blocks (CDI_DOMINATORS, b);
3061
9771b263 3062 while (h.length ())
b5b8b0ac 3063 {
9771b263 3064 b = h.pop ();
b5b8b0ac
AO
3065
3066 prev_bb = b->prev_bb;
0b17ab2f 3067
b5b8b0ac
AO
3068 gcc_assert (!(b->flags & BB_REACHABLE));
3069
3070 delete_basic_block (b);
3071 }
3072
9771b263 3073 h.release ();
b5b8b0ac
AO
3074 }
3075
3076 changed = true;
3077 }
3078 }
3079 }
3080 else
3081 {
fefa31b5
DM
3082 for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
3083 b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
6a58eee9 3084 {
b5b8b0ac
AO
3085 prev_bb = b->prev_bb;
3086
3087 if (!(b->flags & BB_REACHABLE))
3088 {
3089 delete_basic_block (b);
3090 changed = true;
3091 }
6a58eee9 3092 }
402209ff
JH
3093 }
3094
3095 if (changed)
3096 tidy_fallthru_edges ();
3097 return changed;
3098}
6fb5fa3c
DB
3099
3100/* Delete any jump tables never referenced. We can't delete them at the
29f3fd5b
SB
 3101 time of removing the tablejump insn, as they are referenced by the preceding
 3102 insns computing the destination, so we delay deleting and garbage-collect
 3103 them once life information is computed. */
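/* An illustrative shape scanned for between two basic blocks, not part
 of the original source:

 (barrier)
 L42: ;; code_label, otherwise unreferenced
 (jump_table_data ...) ;; ADDR_VEC or ADDR_DIFF_VEC

 Both the label and the table are deleted below. */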
6fb5fa3c
DB
3104void
3105delete_dead_jumptables (void)
3106{
3107 basic_block bb;
3108
29f3fd5b
SB
3109 /* A dead jump table does not belong to any basic block. Scan insns
3110 between two adjacent basic blocks. */
11cd3bed 3111 FOR_EACH_BB_FN (bb, cfun)
6fb5fa3c 3112 {
da5477a9 3113 rtx_insn *insn, *next;
29f3fd5b
SB
3114
3115 for (insn = NEXT_INSN (BB_END (bb));
3116 insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
3117 insn = next)
57d6c446 3118 {
29f3fd5b
SB
3119 next = NEXT_INSN (insn);
3120 if (LABEL_P (insn)
3121 && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
3122 && JUMP_TABLE_DATA_P (next))
3123 {
da5477a9 3124 rtx_insn *label = insn, *jump = next;
29f3fd5b
SB
3125
3126 if (dump_file)
3127 fprintf (dump_file, "Dead jumptable %i removed\n",
3128 INSN_UID (insn));
3129
3130 next = NEXT_INSN (next);
3131 delete_insn (jump);
3132 delete_insn (label);
3133 }
6fb5fa3c
DB
3134 }
3135 }
3136}
3137
402209ff
JH
3138\f
3139/* Tidy the CFG by deleting unreachable code and whatnot. */
3140
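/* Typical invocations, mirroring the passes at the end of this file
 (an illustrative summary, not part of the original source):

 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_THREADING); /* pass_jump */
 cleanup_cfg (CLEANUP_CROSSJUMP); /* pass_jump2 */

 MODE is a bitmask of CLEANUP_* flags; CLEANUP_CFGLAYOUT is or'ed in
 below when we are already in cfglayout mode. */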
3141bool
d329e058 3142cleanup_cfg (int mode)
402209ff 3143{
402209ff
JH
3144 bool changed = false;
3145
aeceeb06
SB
3146 /* Set the cfglayout mode flag here. We could update all the callers
3147 but that is just inconvenient, especially given that we eventually
3148 want to have cfglayout mode as the default. */
3149 if (current_ir_type () == IR_RTL_CFGLAYOUT)
3150 mode |= CLEANUP_CFGLAYOUT;
3151
402209ff 3152 timevar_push (TV_CLEANUP_CFG);
3dec4024
JH
3153 if (delete_unreachable_blocks ())
3154 {
3155 changed = true;
 3156 /* We've possibly created trivially dead code. Clean it up right
95bd1dd7 3157 now to introduce more opportunities for try_optimize_cfg. */
6fb5fa3c 3158 if (!(mode & (CLEANUP_NO_INSN_DEL))
3dec4024 3159 && !reload_completed)
62e5bf5d 3160 delete_trivially_dead_insns (get_insns (), max_reg_num ());
3dec4024 3161 }
bf77398c
ZD
3162
3163 compact_blocks ();
3164
c1e3e2d9
SB
3165 /* To tail-merge blocks ending in the same noreturn function (e.g.
3166 a call to abort) we have to insert fake edges to exit. Do this
3167 here once. The fake edges do not interfere with any other CFG
3168 cleanups. */
3169 if (mode & CLEANUP_CROSSJUMP)
3170 add_noreturn_fake_exit_edges ();
3171
7d817ebc
DE
3172 if (!dbg_cnt (cfg_cleanup))
3173 return changed;
3174
3dec4024
JH
3175 while (try_optimize_cfg (mode))
3176 {
3177 delete_unreachable_blocks (), changed = true;
c1e3e2d9 3178 if (!(mode & CLEANUP_NO_INSN_DEL))
3dec4024 3179 {
c1e3e2d9
SB
3180 /* Try to remove some trivially dead insns when doing an expensive
3181 cleanup. But delete_trivially_dead_insns doesn't work after
3182 reload (it only handles pseudos) and run_fast_dce is too costly
3183 to run in every iteration.
3184
3185 For effective cross jumping, we really want to run a fast DCE to
3186 clean up any dead conditions, or they get in the way of performing
3187 useful tail merges.
3188
3189 Other transformations in cleanup_cfg are not so sensitive to dead
3190 code, so delete_trivially_dead_insns or even doing nothing at all
3191 is good enough. */
3192 if ((mode & CLEANUP_EXPENSIVE) && !reload_completed
3193 && !delete_trivially_dead_insns (get_insns (), max_reg_num ()))
3dec4024 3194 break;
bd2c6270 3195 if ((mode & CLEANUP_CROSSJUMP) && crossjumps_occurred)
f1f10541
RS
3196 {
3197 run_fast_dce ();
3198 mode &= ~CLEANUP_FORCE_FAST_DCE;
3199 }
3dec4024
JH
3200 }
3201 else
3202 break;
3dec4024 3203 }
402209ff 3204
c1e3e2d9
SB
3205 if (mode & CLEANUP_CROSSJUMP)
3206 remove_fake_exit_edges ();
3207
f1f10541
RS
3208 if (mode & CLEANUP_FORCE_FAST_DCE)
3209 run_fast_dce ();
3210
29f3fd5b
SB
3211 /* Don't call delete_dead_jumptables in cfglayout mode, because
3212 that function assumes that jump tables are in the insns stream.
3213 But we also don't _have_ to delete dead jumptables in cfglayout
3214 mode because we shouldn't even be looking at things that are
3215 not in a basic block. Dead jumptables are cleaned up when
3216 going out of cfglayout mode. */
3217 if (!(mode & CLEANUP_CFGLAYOUT))
6fb5fa3c
DB
3218 delete_dead_jumptables ();
3219
7d776ee2
RG
3220 /* ??? We probably do this way too often. */
3221 if (current_loops
3222 && (changed
3223 || (mode & CLEANUP_CFG_CHANGED)))
3224 {
7d776ee2
RG
3225 timevar_push (TV_REPAIR_LOOPS);
3226 /* The above doesn't preserve dominance info if available. */
3227 gcc_assert (!dom_info_available_p (CDI_DOMINATORS));
3228 calculate_dominance_info (CDI_DOMINATORS);
01cb1ef5 3229 fix_loop_structure (NULL);
7d776ee2
RG
3230 free_dominance_info (CDI_DOMINATORS);
3231 timevar_pop (TV_REPAIR_LOOPS);
3232 }
3233
402209ff
JH
3234 timevar_pop (TV_CLEANUP_CFG);
3235
402209ff
JH
3236 return changed;
3237}
ef330312 3238\f
27a4cd48
DM
3239namespace {
3240
3241const pass_data pass_data_jump =
11a687e7 3242{
27a4cd48
DM
3243 RTL_PASS, /* type */
3244 "jump", /* name */
3245 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3246 TV_JUMP, /* tv_id */
3247 0, /* properties_required */
3248 0, /* properties_provided */
3249 0, /* properties_destroyed */
3250 0, /* todo_flags_start */
3bea341f 3251 0, /* todo_flags_finish */
11a687e7 3252};
27a4cd48
DM
3253
3254class pass_jump : public rtl_opt_pass
3255{
3256public:
c3284718
RS
3257 pass_jump (gcc::context *ctxt)
3258 : rtl_opt_pass (pass_data_jump, ctxt)
27a4cd48
DM
3259 {}
3260
3261 /* opt_pass methods: */
be55bfe6 3262 virtual unsigned int execute (function *);
27a4cd48
DM
3263
3264}; // class pass_jump
3265
be55bfe6
TS
3266unsigned int
3267pass_jump::execute (function *)
3268{
3269 delete_trivially_dead_insns (get_insns (), max_reg_num ());
3270 if (dump_file)
3271 dump_flow_info (dump_file, dump_flags);
3272 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0)
3273 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
3274 return 0;
3275}
3276
27a4cd48
DM
3277} // anon namespace
3278
3279rtl_opt_pass *
3280make_pass_jump (gcc::context *ctxt)
3281{
3282 return new pass_jump (ctxt);
3283}
11a687e7 3284\f
27a4cd48
DM
3285namespace {
3286
216779db 3287const pass_data pass_data_jump_after_combine =
98fe146e
IL
3288{
3289 RTL_PASS, /* type */
216779db 3290 "jump_after_combine", /* name */
98fe146e
IL
3291 OPTGROUP_NONE, /* optinfo_flags */
3292 TV_JUMP, /* tv_id */
3293 0, /* properties_required */
3294 0, /* properties_provided */
3295 0, /* properties_destroyed */
3296 0, /* todo_flags_start */
3297 0, /* todo_flags_finish */
3298};
3299
216779db 3300class pass_jump_after_combine : public rtl_opt_pass
98fe146e
IL
3301{
3302public:
216779db
IL
3303 pass_jump_after_combine (gcc::context *ctxt)
3304 : rtl_opt_pass (pass_data_jump_after_combine, ctxt)
98fe146e
IL
3305 {}
3306
3307 /* opt_pass methods: */
3308 virtual unsigned int execute (function *);
3309
216779db 3310}; // class pass_jump_after_combine
98fe146e
IL
3311
3312unsigned int
216779db 3313pass_jump_after_combine::execute (function *)
98fe146e
IL
3314{
3315 cleanup_cfg (flag_thread_jumps ? CLEANUP_THREADING : 0);
3316 return 0;
3317}
3318
3319} // anon namespace
3320
3321rtl_opt_pass *
216779db 3322make_pass_jump_after_combine (gcc::context *ctxt)
98fe146e 3323{
216779db 3324 return new pass_jump_after_combine (ctxt);
98fe146e
IL
3325}
3326
3327namespace {
3328
27a4cd48 3329const pass_data pass_data_jump2 =
ef330312 3330{
27a4cd48
DM
3331 RTL_PASS, /* type */
3332 "jump2", /* name */
3333 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3334 TV_JUMP, /* tv_id */
3335 0, /* properties_required */
3336 0, /* properties_provided */
3337 0, /* properties_destroyed */
3338 0, /* todo_flags_start */
3bea341f 3339 0, /* todo_flags_finish */
ef330312 3340};
27a4cd48
DM
3341
3342class pass_jump2 : public rtl_opt_pass
3343{
3344public:
c3284718
RS
3345 pass_jump2 (gcc::context *ctxt)
3346 : rtl_opt_pass (pass_data_jump2, ctxt)
27a4cd48
DM
3347 {}
3348
3349 /* opt_pass methods: */
be55bfe6
TS
3350 virtual unsigned int execute (function *)
3351 {
3352 cleanup_cfg (flag_crossjumping ? CLEANUP_CROSSJUMP : 0);
3353 return 0;
3354 }
27a4cd48
DM
3355
3356}; // class pass_jump2
3357
3358} // anon namespace
3359
3360rtl_opt_pass *
3361make_pass_jump2 (gcc::context *ctxt)
3362{
3363 return new pass_jump2 (ctxt);
3364}