]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
Merge dataflow branch into mainline
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
b36d64df 1/* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3072d30e 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
b36d64df 5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING. If not, write to the Free
67ce556b 20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA. */
b36d64df 22
5cc577b6 23/* This file contains low level functions to manipulate the CFG and analyze it
24 that are aware of the RTL intermediate language.
b36d64df 25
26 Available functionality:
c60fa3a7 27 - Basic CFG/RTL manipulation API documented in cfghooks.h
5cc577b6 28 - CFG-aware instruction chain manipulation
b36d64df 29 delete_insn, delete_insn_chain
c60fa3a7 30 - Edge splitting and committing to edges
31 insert_insn_on_edge, commit_edge_insertions
32 - CFG updating after insn simplification
33 purge_dead_edges, purge_all_dead_edges
34
35 Functions not supposed for generic use:
5cc577b6 36 - Infrastructure to determine quickly basic block for insn
b36d64df 37 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5cc577b6 38 - Edge redirection with updating and optimizing of insn chain
c60fa3a7 39 block_label, tidy_fallthru_edge, force_nonfallthru */
b36d64df 40\f
41#include "config.h"
42#include "system.h"
805e22b2 43#include "coretypes.h"
44#include "tm.h"
b36d64df 45#include "tree.h"
46#include "rtl.h"
47#include "hard-reg-set.h"
48#include "basic-block.h"
49#include "regs.h"
50#include "flags.h"
51#include "output.h"
52#include "function.h"
53#include "except.h"
54#include "toplev.h"
55#include "tm_p.h"
56#include "obstack.h"
26fb1781 57#include "insn-config.h"
1026363d 58#include "cfglayout.h"
804e497b 59#include "expr.h"
065efcb1 60#include "target.h"
c50ae675 61#include "cfgloop.h"
e0dde8f8 62#include "ggc.h"
77fce4cd 63#include "tree-pass.h"
3072d30e 64#include "df.h"
b36d64df 65
4c9e08a4 66static int can_delete_note_p (rtx);
67static int can_delete_label_p (rtx);
ca462ec0 68static void commit_one_edge_insertion (edge);
4c9e08a4 69static basic_block rtl_split_edge (edge);
5f5d4cd1 70static bool rtl_move_block_after (basic_block, basic_block);
4c9e08a4 71static int rtl_verify_flow_info (void);
5f5d4cd1 72static basic_block cfg_layout_split_block (basic_block, void *);
4ee9c684 73static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
4c9e08a4 74static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
75static void cfg_layout_delete_block (basic_block);
76static void rtl_delete_block (basic_block);
77static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
4ee9c684 78static edge rtl_redirect_edge_and_branch (edge, basic_block);
5f5d4cd1 79static basic_block rtl_split_block (basic_block, void *);
80static void rtl_dump_bb (basic_block, FILE *, int);
4c9e08a4 81static int rtl_verify_flow_info_1 (void);
5f5d4cd1 82static void rtl_make_forwarder_block (edge);
b36d64df 83\f
84/* Return true if NOTE is not one of the ones that must be kept paired,
5cc577b6 85 so that we may simply delete it. */
b36d64df 86
87static int
4c9e08a4 88can_delete_note_p (rtx note)
b36d64df 89{
ad4583d9 90 return (NOTE_KIND (note) == NOTE_INSN_DELETED
91 || NOTE_KIND (note) == NOTE_INSN_BASIC_BLOCK);
b36d64df 92}
93
94/* True if a given label can be deleted. */
95
96static int
4c9e08a4 97can_delete_label_p (rtx label)
b36d64df 98{
5cc577b6 99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
2f138c1c 102 && !in_expr_list_p (forced_labels, label));
b36d64df 103}
104
/* Delete INSN by patching it out.  Return the next insn.

   Labels that cannot be deleted outright are demoted to
   NOTE_INSN_DELETED_LABEL notes instead of being unlinked, since other
   structures (constant pool, variables) may still reference them.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;
	}

      /* A deleted label is no longer a valid non-local goto target.  */
      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn)
      && JUMP_LABEL (insn)
      && LABEL_P (JUMP_LABEL (insn)))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else
    {
      while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Jump tables reference one label per entry; drop all those uses too.  */
  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
181
fb20d6fa 182/* Like delete_insn but also purge dead edges from BB. */
183rtx
4c9e08a4 184delete_insn_and_edges (rtx insn)
fb20d6fa 185{
186 rtx x;
187 bool purge = false;
188
ab87d1bc 189 if (INSN_P (insn)
fb20d6fa 190 && BLOCK_FOR_INSN (insn)
5496dbfc 191 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
fb20d6fa 192 purge = true;
193 x = delete_insn (insn);
194 if (purge)
195 purge_dead_edges (BLOCK_FOR_INSN (insn));
196 return x;
197}
198
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  while (1)
    {
      next = NEXT_INSN (start);
      /* Paired notes are kept in place; everything else is deleted and
	 NEXT is refreshed since deletion may rewrite the chain.  */
      if (NOTE_P (start) && !can_delete_note_p (start))
	;
      else
	next = delete_insn (start);

      /* Survivors (kept notes, labels demoted to NOTE_INSN_DELETED_LABEL)
	 may still point at the dying block; detach them on request.  */
      if (clear_bb && !INSN_DELETED_P (start))
	set_block_for_insn (start, NULL);

      if (start == finish)
	break;
      start = next;
    }
}
fb20d6fa 227
228/* Like delete_insn but also purge dead edges from BB. */
229void
4c9e08a4 230delete_insn_chain_and_edges (rtx first, rtx last)
fb20d6fa 231{
232 bool purge = false;
233
ab87d1bc 234 if (INSN_P (last)
fb20d6fa 235 && BLOCK_FOR_INSN (last)
5496dbfc 236 && BB_END (BLOCK_FOR_INSN (last)) == last)
fb20d6fa 237 purge = true;
39257eb5 238 delete_insn_chain (first, last, false);
fb20d6fa 239 if (purge)
240 purge_dead_edges (BLOCK_FOR_INSN (last));
241}
b36d64df 242\f
5cc577b6 243/* Create a new basic block consisting of the instructions between HEAD and END
244 inclusive. This function is designed to allow fast BB construction - reuses
245 the note and basic block struct in BB_NOTE, if any and do not grow
246 BASIC_BLOCK chain and should be used directly only by CFG construction code.
247 END can be NULL in to create new empty basic block before HEAD. Both END
7fa55aef 248 and HEAD can be NULL to create basic block at the end of INSN chain.
249 AFTER is the basic block we should be put after. */
b36d64df 250
251basic_block
4c9e08a4 252create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
b36d64df 253{
254 basic_block bb;
255
256 if (bb_note
b36d64df 257 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
258 && bb->aux == NULL)
259 {
260 /* If we found an existing note, thread it back onto the chain. */
261
262 rtx after;
263
6d7dc5b9 264 if (LABEL_P (head))
b36d64df 265 after = head;
266 else
267 {
268 after = PREV_INSN (head);
269 head = bb_note;
270 }
271
272 if (after != bb_note && NEXT_INSN (after) != bb_note)
ab87d1bc 273 reorder_insns_nobb (bb_note, bb_note, after);
b36d64df 274 }
275 else
276 {
277 /* Otherwise we must create a note and a basic block structure. */
278
279 bb = alloc_block ();
280
e0dde8f8 281 init_rtl_bb_info (bb);
b36d64df 282 if (!head && !end)
5cc577b6 283 head = end = bb_note
284 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
6d7dc5b9 285 else if (LABEL_P (head) && end)
b36d64df 286 {
287 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
288 if (head == end)
289 end = bb_note;
290 }
291 else
292 {
293 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
294 head = bb_note;
295 if (!end)
296 end = head;
297 }
5cc577b6 298
b36d64df 299 NOTE_BASIC_BLOCK (bb_note) = bb;
300 }
301
302 /* Always include the bb note in the block. */
303 if (NEXT_INSN (end) == bb_note)
304 end = bb_note;
305
5496dbfc 306 BB_HEAD (bb) = head;
307 BB_END (bb) = end;
f23d9a22 308 bb->index = last_basic_block++;
e0dde8f8 309 bb->flags = BB_NEW | BB_RTL;
7fa55aef 310 link_block (bb, after);
85b938d0 311 SET_BASIC_BLOCK (bb->index, bb);
3072d30e 312 df_bb_refs_record (bb->index, false);
ab87d1bc 313 update_bb_for_insn (bb);
7562ed74 314 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
b36d64df 315
316 /* Tag the block so that we know it has been used when considering
317 other basic block notes. */
318 bb->aux = bb;
319
320 return bb;
321}
322
5cc577b6 323/* Create new basic block consisting of instructions in between HEAD and END
7fa55aef 324 and place it to the BB chain after block AFTER. END can be NULL in to
5cc577b6 325 create new empty basic block before HEAD. Both END and HEAD can be NULL to
326 create basic block at the end of INSN chain. */
b36d64df 327
c60fa3a7 328static basic_block
329rtl_create_basic_block (void *headp, void *endp, basic_block after)
b36d64df 330{
f780cc25 331 rtx head = (rtx) headp, end = (rtx) endp;
b36d64df 332 basic_block bb;
b3d6de89 333
743ff5bb 334 /* Grow the basic block array if needed. */
85b938d0 335 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
743ff5bb 336 {
337 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
e85c2c2d 338 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
743ff5bb 339 }
b3d6de89 340
3c0a32c9 341 n_basic_blocks++;
b36d64df 342
f23d9a22 343 bb = create_basic_block_structure (head, end, NULL, after);
b36d64df 344 bb->aux = NULL;
345 return bb;
346}
c60fa3a7 347
348static basic_block
349cfg_layout_create_basic_block (void *head, void *end, basic_block after)
350{
351 basic_block newbb = rtl_create_basic_block (head, end, after);
352
c60fa3a7 353 return newbb;
354}
b36d64df 355\f
356/* Delete the insns in a (non-live) block. We physically delete every
357 non-deleted-note insn, and update the flow graph appropriately.
358
359 Return nonzero if we deleted an exception handler. */
360
361/* ??? Preserving all such notes strikes me as wrong. It would be nice
362 to post-process the stream to remove empty blocks, loops, ranges, etc. */
363
5f704f9f 364static void
4c9e08a4 365rtl_delete_block (basic_block b)
b36d64df 366{
5d65a39c 367 rtx insn, end;
b36d64df 368
369 /* If the head of this block is a CODE_LABEL, then it might be the
57548910 370 label for an exception handler which can't be reached. We need
371 to remove the label from the exception_handler_label list. */
5496dbfc 372 insn = BB_HEAD (b);
6d7dc5b9 373 if (LABEL_P (insn))
b36d64df 374 maybe_remove_eh_handler (insn);
375
5d65a39c 376 end = get_last_bb_insn (b);
b36d64df 377
378 /* Selectively delete the entire chain. */
5496dbfc 379 BB_HEAD (b) = NULL;
39257eb5 380 delete_insn_chain (insn, end, true);
381
3072d30e 382
383 if (dump_file)
384 fprintf (dump_file, "deleting block %d\n", b->index);
385 df_bb_delete (b->index);
b36d64df 386}
387\f
f23d9a22 388/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
b36d64df 389
390void
4c9e08a4 391compute_bb_for_insn (void)
b36d64df 392{
4c26117a 393 basic_block bb;
b36d64df 394
4c26117a 395 FOR_EACH_BB (bb)
b36d64df 396 {
5496dbfc 397 rtx end = BB_END (bb);
5cc577b6 398 rtx insn;
b36d64df 399
5496dbfc 400 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
b36d64df 401 {
ab87d1bc 402 BLOCK_FOR_INSN (insn) = bb;
b36d64df 403 if (insn == end)
404 break;
b36d64df 405 }
406 }
407}
408
409/* Release the basic_block_for_insn array. */
410
2a1990e9 411unsigned int
4c9e08a4 412free_bb_for_insn (void)
b36d64df 413{
ab87d1bc 414 rtx insn;
415 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6d7dc5b9 416 if (!BARRIER_P (insn))
ab87d1bc 417 BLOCK_FOR_INSN (insn) = NULL;
2a1990e9 418 return 0;
b36d64df 419}
420
/* Pass structure that runs free_bb_for_insn; it destroys the PROP_cfg
   property and provides/requires nothing else.  */
struct tree_opt_pass pass_free_cfg =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  free_bb_for_insn,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
437
a0fee14a 438/* Return RTX to emit after when we want to emit code on the entry of function. */
439rtx
440entry_of_function (void)
441{
a0c938f0 442 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
4d2e5d52 443 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
a0fee14a 444}
445
aa02fa19 446/* Emit INSN at the entry point of the function, ensuring that it is only
447 executed once per function. */
448void
449emit_insn_at_entry (rtx insn)
450{
451 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
452 edge e = ei_safe_edge (ei);
1db61145 453 gcc_assert (e->flags & EDGE_FALLTHRU);
aa02fa19 454
455 insert_insn_on_edge (insn, e);
456 commit_edge_insertions ();
457}
458
b36d64df 459/* Update insns block within BB. */
460
461void
4c9e08a4 462update_bb_for_insn (basic_block bb)
b36d64df 463{
464 rtx insn;
465
5496dbfc 466 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
b36d64df 467 {
6d7dc5b9 468 if (!BARRIER_P (insn))
3072d30e 469 {
470 set_block_for_insn (insn, bb);
471 df_insn_change_bb (insn);
472 }
5496dbfc 473 if (insn == BB_END (bb))
b36d64df 474 break;
475 }
476}
b36d64df 477\f
3072d30e 478/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
479 note associated with the BLOCK. */
480
481static rtx
482first_insn_after_basic_block_note (basic_block block)
483{
484 rtx insn;
485
486 /* Get the first instruction in the block. */
487 insn = BB_HEAD (block);
488
489 if (insn == NULL_RTX)
490 return NULL_RTX;
491 if (LABEL_P (insn))
492 insn = NEXT_INSN (insn);
493 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
494
495 return NEXT_INSN (insn);
496}
497
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  If INSNP is NULL, split
   directly after the basic block note (i.e. move the whole body).  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      /* Split before the first real insn; fall back to the chain end for
	 a block with no insns after its note.  */
      if (insn)
	insn = PREV_INSN (insn);
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    /* Splitting at the end would leave NEW_BB empty; emit a placeholder
       note so it has a head.  */
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  The whole successor list moves to
     NEW_BB; BB will get its fallthru edge from the caller.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
540
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* This might have been an EH label that no longer has incoming
	 EH edges.  Update data structures to match.  */
      maybe_remove_eh_handler (b_head);

      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      /* Back up over any notes preceding the jump, staying within A.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (del_first, del_last, true);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
	{
	  set_block_for_insn (x, a);
	  df_insn_change_bb (x);
	}

      set_block_for_insn (b_end, a);
      df_insn_change_bb (b_end);

      a_end = b_end;
    }

  /* B is gone; A now ends where B ended (or at its own trimmed end).  */
  df_bb_delete (b->index);
  BB_END (a) = a_end;
}
c60fa3a7 640
3072d30e 641
c60fa3a7 642/* Return true when block A and B can be merged. */
643static bool
644rtl_can_merge_blocks (basic_block a,basic_block b)
645{
4f18499c 646 /* If we are partitioning hot/cold basic blocks, we don't want to
647 mess up unconditional or indirect jumps that cross between hot
7562ed74 648 and cold sections.
649
1118aef7 650 Basic block partitioning may result in some jumps that appear to
a0c938f0 651 be optimizable (or blocks that appear to be mergeable), but which really
652 must be left untouched (they are required to make it safely across
653 partition boundaries). See the comments at the top of
1118aef7 654 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
7562ed74 655
1897b881 656 if (BB_PARTITION (a) != BB_PARTITION (b))
7562ed74 657 return false;
4f18499c 658
c60fa3a7 659 /* There must be exactly one edge in between the blocks. */
ea091dfd 660 return (single_succ_p (a)
661 && single_succ (a) == b
662 && single_pred_p (b)
cd665a06 663 && a != b
c60fa3a7 664 /* Must be simple edge. */
ea091dfd 665 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
c60fa3a7 666 && a->next_bb == b
667 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
668 /* If the jump insn has side effects,
669 we can't kill the edge. */
6d7dc5b9 670 && (!JUMP_P (BB_END (a))
18f811c9 671 || (reload_completed
5496dbfc 672 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
c60fa3a7 673}
b36d64df 674\f
5cc577b6 675/* Return the label in the head of basic block BLOCK. Create one if it doesn't
676 exist. */
b36d64df 677
678rtx
4c9e08a4 679block_label (basic_block block)
b36d64df 680{
681 if (block == EXIT_BLOCK_PTR)
682 return NULL_RTX;
5cc577b6 683
6d7dc5b9 684 if (!LABEL_P (BB_HEAD (block)))
b36d64df 685 {
5496dbfc 686 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
b36d64df 687 }
5cc577b6 688
5496dbfc 689 return BB_HEAD (block);
b36d64df 690}
691
692/* Attempt to perform edge redirection by replacing possibly complex jump
5cc577b6 693 instruction by unconditional jump or removing jump completely. This can
694 apply only if all edges now point to the same block. The parameters and
695 return values are equivalent to redirect_edge_and_branch. */
b36d64df 696
4ee9c684 697edge
c60fa3a7 698try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
b36d64df 699{
700 basic_block src = e->src;
5496dbfc 701 rtx insn = BB_END (src), kill_from;
b19beda9 702 rtx set;
b36d64df 703 int fallthru = 0;
4f18499c 704
705 /* If we are partitioning hot/cold basic blocks, we don't want to
706 mess up unconditional or indirect jumps that cross between hot
1118aef7 707 and cold sections.
708
709 Basic block partitioning may result in some jumps that appear to
a0c938f0 710 be optimizable (or blocks that appear to be mergeable), but which really
711 must be left untouched (they are required to make it safely across
712 partition boundaries). See the comments at the top of
1118aef7 713 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
a0c938f0 714
1897b881 715 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
716 || BB_PARTITION (src) != BB_PARTITION (target))
53f8294f 717 return NULL;
4f18499c 718
34f05bfa 719 /* We can replace or remove a complex jump only when we have exactly
720 two edges. Also, if we have exactly one outgoing edge, we can
721 redirect that. */
722 if (EDGE_COUNT (src->succs) >= 3
723 /* Verify that all targets will be TARGET. Specifically, the
724 edge that is not E must also go to TARGET. */
725 || (EDGE_COUNT (src->succs) == 2
726 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
727 return NULL;
5cc577b6 728
34f05bfa 729 if (!onlyjump_p (insn))
4ee9c684 730 return NULL;
e5562ab8 731 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
4ee9c684 732 return NULL;
b36d64df 733
734 /* Avoid removing branch with side effects. */
735 set = single_set (insn);
736 if (!set || side_effects_p (set))
4ee9c684 737 return NULL;
b36d64df 738
739 /* In case we zap a conditional jump, we'll need to kill
740 the cc0 setter too. */
741 kill_from = insn;
742#ifdef HAVE_cc0
3d5a3e76 743 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
744 && only_sets_cc0_p (PREV_INSN (insn)))
b36d64df 745 kill_from = PREV_INSN (insn);
746#endif
747
748 /* See if we can create the fallthru edge. */
c60fa3a7 749 if (in_cfglayout || can_fallthru (src, target))
b36d64df 750 {
450d042a 751 if (dump_file)
752 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
b36d64df 753 fallthru = 1;
754
4a82352a 755 /* Selectively unlink whole insn chain. */
c60fa3a7 756 if (in_cfglayout)
757 {
bc5f266a 758 rtx insn = src->il.rtl->footer;
c60fa3a7 759
39257eb5 760 delete_insn_chain (kill_from, BB_END (src), false);
c60fa3a7 761
762 /* Remove barriers but keep jumptables. */
763 while (insn)
764 {
6d7dc5b9 765 if (BARRIER_P (insn))
c60fa3a7 766 {
767 if (PREV_INSN (insn))
768 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
769 else
bc5f266a 770 src->il.rtl->footer = NEXT_INSN (insn);
c60fa3a7 771 if (NEXT_INSN (insn))
772 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
773 }
6d7dc5b9 774 if (LABEL_P (insn))
c60fa3a7 775 break;
776 insn = NEXT_INSN (insn);
777 }
778 }
779 else
39257eb5 780 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
781 false);
b36d64df 782 }
5cc577b6 783
b36d64df 784 /* If this already is simplejump, redirect it. */
785 else if (simplejump_p (insn))
786 {
787 if (e->dest == target)
4ee9c684 788 return NULL;
450d042a 789 if (dump_file)
790 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
b3d6de89 791 INSN_UID (insn), e->dest->index, target->index);
8963581a 792 if (!redirect_jump (insn, block_label (target), 0))
793 {
cc636d56 794 gcc_assert (target == EXIT_BLOCK_PTR);
795 return NULL;
8963581a 796 }
b36d64df 797 }
5cc577b6 798
8963581a 799 /* Cannot do anything for target exit block. */
800 else if (target == EXIT_BLOCK_PTR)
4ee9c684 801 return NULL;
8963581a 802
b36d64df 803 /* Or replace possibly complicated jump insn by simple jump insn. */
804 else
805 {
806 rtx target_label = block_label (target);
669f9131 807 rtx barrier, label, table;
b36d64df 808
0891f67c 809 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
5496dbfc 810 JUMP_LABEL (BB_END (src)) = target_label;
b36d64df 811 LABEL_NUSES (target_label)++;
450d042a 812 if (dump_file)
813 fprintf (dump_file, "Replacing insn %i by jump %i\n",
5496dbfc 814 INSN_UID (insn), INSN_UID (BB_END (src)));
b36d64df 815
1ed8ccdb 816
39257eb5 817 delete_insn_chain (kill_from, insn, false);
b36d64df 818
1ed8ccdb 819 /* Recognize a tablejump that we are converting to a
820 simple jump and remove its associated CODE_LABEL
821 and ADDR_VEC or ADDR_DIFF_VEC. */
e5562ab8 822 if (tablejump_p (insn, &label, &table))
39257eb5 823 delete_insn_chain (label, table, false);
669f9131 824
5496dbfc 825 barrier = next_nonnote_insn (BB_END (src));
6d7dc5b9 826 if (!barrier || !BARRIER_P (barrier))
5496dbfc 827 emit_barrier_after (BB_END (src));
1824678b 828 else
829 {
5496dbfc 830 if (barrier != NEXT_INSN (BB_END (src)))
1824678b 831 {
832 /* Move the jump before barrier so that the notes
833 which originally were or were created before jump table are
834 inside the basic block. */
5496dbfc 835 rtx new_insn = BB_END (src);
1824678b 836 rtx tmp;
837
5496dbfc 838 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
1824678b 839 tmp = NEXT_INSN (tmp))
3072d30e 840 {
841 set_block_for_insn (tmp, src);
842 df_insn_change_bb (tmp);
843 }
1824678b 844
845 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
846 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
847
848 NEXT_INSN (new_insn) = barrier;
849 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
850
851 PREV_INSN (new_insn) = PREV_INSN (barrier);
852 PREV_INSN (barrier) = new_insn;
853 }
854 }
b36d64df 855 }
856
857 /* Keep only one edge out and set proper flags. */
ea091dfd 858 if (!single_succ_p (src))
cd665a06 859 remove_edge (e);
ea091dfd 860 gcc_assert (single_succ_p (src));
cd665a06 861
ea091dfd 862 e = single_succ_edge (src);
b36d64df 863 if (fallthru)
864 e->flags = EDGE_FALLTHRU;
865 else
866 e->flags = 0;
5cc577b6 867
b36d64df 868 e->probability = REG_BR_PROB_BASE;
869 e->count = src->count;
870
b36d64df 871 if (e->dest != target)
872 redirect_edge_succ (e, target);
4ee9c684 873 return e;
b36d64df 874}
875
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx tmp;
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn))
    return NULL;

  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return NULL;
      /* ADDR_VEC holds its labels in operand 0, ADDR_DIFF_VEC in
	 operand 1.  */
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Repoint every table entry that targeted the old label.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return NULL;

      /* If the insn doesn't go where we think, we're confused.  */
      gcc_assert (JUMP_LABEL (insn) == old_label);

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
959
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;

  /* Abnormal call and exception edges have no branch insn to rewrite;
     they cannot be redirected by this routine.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already going where the caller wants; nothing to simplify.  */
  if (e->dest == target)
    return e;

  /* First try the cheap transformation: replace the jump insn entirely
     (e.g. turn a conditional jump whose other arm is dead into a simple
     jump).  On success the source block's insn stream changed.  */
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* Otherwise retarget the existing branch insn's label.  */
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  return ret;
}
996
20dd417a 997/* Like force_nonfallthru below, but additionally performs redirection
b36d64df 998 Used by redirect_edge_and_branch_force. */
999
109c6afa 1000static basic_block
4c9e08a4 1001force_nonfallthru_and_redirect (edge e, basic_block target)
b36d64df 1002{
498d173d 1003 basic_block jump_block, new_bb = NULL, src = e->src;
b36d64df 1004 rtx note;
1005 edge new_edge;
498d173d 1006 int abnormal_edge_flags = 0;
b36d64df 1007
accd6c44 1008 /* In the case the last instruction is conditional jump to the next
1009 instruction, first redirect the jump itself and then continue
df07c3ae 1010 by creating a basic block afterwards to redirect fallthru edge. */
accd6c44 1011 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
5496dbfc 1012 && any_condjump_p (BB_END (e->src))
5496dbfc 1013 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
accd6c44 1014 {
1015 rtx note;
d9aa52be 1016 edge b = unchecked_make_edge (e->src, target, 0);
cc636d56 1017 bool redirected;
accd6c44 1018
cc636d56 1019 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1020 gcc_assert (redirected);
a0c938f0 1021
5496dbfc 1022 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
accd6c44 1023 if (note)
1024 {
1025 int prob = INTVAL (XEXP (note, 0));
1026
1027 b->probability = prob;
1028 b->count = e->count * prob / REG_BR_PROB_BASE;
1029 e->probability -= e->probability;
1030 e->count -= b->count;
1031 if (e->probability < 0)
1032 e->probability = 0;
1033 if (e->count < 0)
1034 e->count = 0;
1035 }
1036 }
1037
b36d64df 1038 if (e->flags & EDGE_ABNORMAL)
498d173d 1039 {
1040 /* Irritating special case - fallthru edge to the same block as abnormal
1041 edge.
1042 We can't redirect abnormal edge, but we still can split the fallthru
4c9e08a4 1043 one and create separate abnormal edge to original destination.
498d173d 1044 This allows bb-reorder to make such edge non-fallthru. */
cc636d56 1045 gcc_assert (e->dest == target);
498d173d 1046 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1047 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1048 }
cc636d56 1049 else
76cc9616 1050 {
cc636d56 1051 gcc_assert (e->flags & EDGE_FALLTHRU);
1052 if (e->src == ENTRY_BLOCK_PTR)
1053 {
1054 /* We can't redirect the entry block. Create an empty block
cd665a06 1055 at the start of the function which we use to add the new
1056 jump. */
1057 edge tmp;
1058 edge_iterator ei;
1059 bool found = false;
a0c938f0 1060
cd665a06 1061 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
a0c938f0 1062
cc636d56 1063 /* Change the existing edge's source to be the new block, and add
1064 a new edge from the entry block to the new block. */
1065 e->src = bb;
cd665a06 1066 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1067 {
1068 if (tmp == e)
1069 {
55b71d24 1070 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
cd665a06 1071 found = true;
1072 break;
1073 }
1074 else
1075 ei_next (&ei);
1076 }
a0c938f0 1077
cd665a06 1078 gcc_assert (found);
a0c938f0 1079
046bfc77 1080 VEC_safe_push (edge, gc, bb->succs, e);
cc636d56 1081 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1082 }
76cc9616 1083 }
1084
cd665a06 1085 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
b36d64df 1086 {
1087 /* Create the new structures. */
3160014e 1088
7a1fee99 1089 /* If the old block ended with a tablejump, skip its table
1090 by searching forward from there. Otherwise start searching
1091 forward from the last instruction of the old block. */
5496dbfc 1092 if (!tablejump_p (BB_END (e->src), NULL, &note))
1093 note = BB_END (e->src);
3160014e 1094 note = NEXT_INSN (note);
1095
3160014e 1096 jump_block = create_basic_block (note, NULL, e->src);
b36d64df 1097 jump_block->count = e->count;
1098 jump_block->frequency = EDGE_FREQUENCY (e);
1099 jump_block->loop_depth = target->loop_depth;
1100
4f18499c 1101 /* Make sure new block ends up in correct hot/cold section. */
1102
7562ed74 1103 BB_COPY_PARTITION (jump_block, e->src);
065efcb1 1104 if (flag_reorder_blocks_and_partition
1897b881 1105 && targetm.have_named_sections
1106 && JUMP_P (BB_END (jump_block))
1107 && !any_condjump_p (BB_END (jump_block))
1108 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1109 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
1110 NULL_RTX,
1111 REG_NOTES
1112 (BB_END
a0c938f0 1113 (jump_block)));
1114
b36d64df 1115 /* Wire edge in. */
1116 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1117 new_edge->probability = e->probability;
1118 new_edge->count = e->count;
1119
1120 /* Redirect old edge. */
1121 redirect_edge_pred (e, jump_block);
1122 e->probability = REG_BR_PROB_BASE;
1123
1124 new_bb = jump_block;
1125 }
1126 else
1127 jump_block = e->src;
5cc577b6 1128
b36d64df 1129 e->flags &= ~EDGE_FALLTHRU;
1130 if (target == EXIT_BLOCK_PTR)
1131 {
38c95e2f 1132#ifdef HAVE_return
0891f67c 1133 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
38c95e2f 1134#else
cc636d56 1135 gcc_unreachable ();
38c95e2f 1136#endif
b36d64df 1137 }
1138 else
1139 {
1140 rtx label = block_label (target);
0891f67c 1141 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
5496dbfc 1142 JUMP_LABEL (BB_END (jump_block)) = label;
b36d64df 1143 LABEL_NUSES (label)++;
1144 }
5cc577b6 1145
5496dbfc 1146 emit_barrier_after (BB_END (jump_block));
b36d64df 1147 redirect_edge_succ_nodup (e, target);
1148
498d173d 1149 if (abnormal_edge_flags)
1150 make_edge (src, target, abnormal_edge_flags);
1151
3072d30e 1152 df_mark_solutions_dirty ();
b36d64df 1153 return new_bb;
1154}
1155
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

basic_block
force_nonfallthru (edge e)
{
  /* "Redirecting" to the current destination just forces the jump.  */
  return force_nonfallthru_and_redirect (e, e->dest);
}
1165
/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  /* Cheap path: the in-place redirection may succeed, or E may already
     point at TARGET; either way no new block is created.  */
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target);
}
1182
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Step back so the deletion below starts at the jump itself.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  Everything strictly between Q and
     the head of C (the jump, barriers, deletable notes) goes away.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
b36d64df 1231\f
5f5d4cd1 1232/* Should move basic block BB after basic block AFTER. NIY. */
1233
1234static bool
1235rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1236 basic_block after ATTRIBUTE_UNUSED)
1237{
1238 return false;
1239}
1240
b36d64df 1241/* Split a (typically critical) edge. Return the new block.
a53ff4c1 1242 The edge must not be abnormal.
b36d64df 1243
1244 ??? The code generally expects to be called on critical edges.
1245 The case of a block ending in an unconditional jump to a
1246 block with multiple predecessors is not handled optimally. */
1247
9140e457 1248static basic_block
4c9e08a4 1249rtl_split_edge (edge edge_in)
b36d64df 1250{
1251 basic_block bb;
b36d64df 1252 rtx before;
1253
1254 /* Abnormal edges cannot be split. */
cc636d56 1255 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
b36d64df 1256
1257 /* We are going to place the new block in front of edge destination.
4a82352a 1258 Avoid existence of fallthru predecessors. */
b36d64df 1259 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1260 {
1261 edge e;
cd665a06 1262 edge_iterator ei;
5cc577b6 1263
cd665a06 1264 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
b36d64df 1265 if (e->flags & EDGE_FALLTHRU)
1266 break;
1267
1268 if (e)
1269 force_nonfallthru (e);
1270 }
1271
588aadb7 1272 /* Create the basic block note. */
1273 if (edge_in->dest != EXIT_BLOCK_PTR)
5496dbfc 1274 before = BB_HEAD (edge_in->dest);
b36d64df 1275 else
1276 before = NULL_RTX;
1277
9bb8a4af 1278 /* If this is a fall through edge to the exit block, the blocks might be
1279 not adjacent, and the right place is the after the source. */
1280 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1281 {
1282 before = NEXT_INSN (BB_END (edge_in->src));
9bb8a4af 1283 bb = create_basic_block (before, NULL, edge_in->src);
7562ed74 1284 BB_COPY_PARTITION (bb, edge_in->src);
9bb8a4af 1285 }
1286 else
065efcb1 1287 {
1288 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
7562ed74 1289 /* ??? Why not edge_in->dest->prev_bb here? */
1290 BB_COPY_PARTITION (bb, edge_in->dest);
065efcb1 1291 }
b36d64df 1292
805e22b2 1293 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
b36d64df 1294
917bbcab 1295 /* For non-fallthru edges, we must adjust the predecessor's
b36d64df 1296 jump instruction to target our new block. */
1297 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1298 {
cc636d56 1299 edge redirected = redirect_edge_and_branch (edge_in, bb);
1300 gcc_assert (redirected);
b36d64df 1301 }
1302 else
1303 redirect_edge_succ (edge_in, bb);
1304
1305 return bb;
1306}
1307
1308/* Queue instructions for insertion on an edge between two basic blocks.
1309 The new instructions and basic blocks (if any) will not appear in the
1310 CFG until commit_edge_insertions is called. */
1311
1312void
4c9e08a4 1313insert_insn_on_edge (rtx pattern, edge e)
b36d64df 1314{
1315 /* We cannot insert instructions on an abnormal critical edge.
1316 It will be easier to find the culprit if we die now. */
cc636d56 1317 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
b36d64df 1318
4ee9c684 1319 if (e->insns.r == NULL_RTX)
b36d64df 1320 start_sequence ();
1321 else
4ee9c684 1322 push_to_sequence (e->insns.r);
b36d64df 1323
1324 emit_insn (pattern);
1325
4ee9c684 1326 e->insns.r = get_insns ();
b36d64df 1327 end_sequence ();
1328}
1329
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* NOTE(review): BEFORE and AFTER are both initialized to NULL_RTX just
     above, so this condition is always true — apparently a leftover from
     an older version that received insertion hints.  */
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
	 one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = BB_HEAD (bb);
	  if (LABEL_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == BB_HEAD (bb))
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && single_succ_p (e->src)
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (JUMP_P (BB_END (bb)))
	    before = BB_END (bb);
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of
		 what's what.  */
	      gcc_assert (e->flags & EDGE_FALLTHRU);

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);

	  /* When partitioning hot/cold code, a new block created on a
	     non-crossing edge out of the cold partition may need a
	     REG_CROSSING_JUMP note on its (unconditional) jump.  */
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && e->src != ENTRY_BLOCK_PTR
	      && BB_PARTITION (e->src) == BB_COLD_PARTITION
	      && !(e->flags & EDGE_CROSSING))
	    {
	      rtx bb_note, cur_insn;

	      bb_note = NULL_RTX;
	      for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
		   cur_insn = NEXT_INSN (cur_insn))
		if (NOTE_INSN_BASIC_BLOCK_P (cur_insn))
		  {
		    bb_note = cur_insn;
		    break;
		  }

	      if (JUMP_P (BB_END (bb))
		  && !any_condjump_p (BB_END (bb))
		  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
		REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
		  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
	    }
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  if (current_ir_type () != IR_RTL_CFGLAYOUT)
    bb->aux = &bb->aux;
}
1456
/* Update the CFG for all queued instructions.  Walks every edge in the
   function, splices in any insns queued by insert_insn_on_edge, then (in
   plain RTL mode) re-discovers sub-basic-blocks in the touched blocks.  */

void
commit_edge_insertions (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e);
	  }
    }

  if (!changed)
    return;

  /* In the old rtl CFG API, it was OK to insert control flow on an
     edge, apparently?  In cfglayout mode, this will *not* work, and
     the caller is responsible for making sure that control flow is
     valid at all times.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    return;

  /* Collect the blocks commit_one_edge_insertion marked via bb->aux and
     let find_many_sub_basic_blocks rebuild their internal structure.  */
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
1507\f
3072d30e 1508
5f5d4cd1 1509/* Print out RTL-specific basic block information (live information
1510 at start and end). */
b36d64df 1511
028f8cc7 1512static void
5f5d4cd1 1513rtl_dump_bb (basic_block bb, FILE *outf, int indent)
b36d64df 1514{
1515 rtx insn;
1516 rtx last;
5f5d4cd1 1517 char *s_indent;
b36d64df 1518
f780cc25 1519 s_indent = (char *) alloca ((size_t) indent + 1);
4acc30e5 1520 memset (s_indent, ' ', (size_t) indent);
5f5d4cd1 1521 s_indent[indent] = '\0';
3072d30e 1522
1523 if (df)
1524 {
1525 df_dump_top (bb, outf);
1526 putc ('\n', outf);
1527 }
b36d64df 1528
5496dbfc 1529 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
b36d64df 1530 insn = NEXT_INSN (insn))
1531 print_rtl_single (outf, insn);
1532
3072d30e 1533 if (df)
1534 {
1535 df_dump_bottom (bb, outf);
1536 putc ('\n', outf);
1537 }
1538
b36d64df 1539}
1540\f
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
  rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Maps from insn UID to the block that starts/ends at that insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      if (df)
	df_dump_start (outf);

      /* First pass: record block boundaries and membership per insn.
	 An insn already seen in another block is marked IN_MULTIPLE_BB.  */
      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      /* Second pass: walk the whole insn chain, annotating block starts,
	 ends, and out-of-block or multi-block insns as we print.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; Start of basic block (");
	      FOR_EACH_EDGE (e, ei, bb->preds)
		fprintf (outf, " %d", e->src->index);
	      fprintf (outf, ") -> %d\n", bb->index);

	      if (df)
		{
		  df_dump_top (bb, outf);
		  putc ('\n', outf);
		}
	      FOR_EACH_EDGE (e, ei, bb->preds)
		{
		  fputs (";; Pred edge ", outf);
		  dump_edge_info (outf, e, 0);
		  fputc ('\n', outf);
		}
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && !NOTE_P (tmp_rtx)
	      && !BARRIER_P (tmp_rtx))
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; End of basic block %d -> (", bb->index);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		fprintf (outf, " %d", e->dest->index);
	      fprintf (outf, ")\n");

	      if (df)
		{
		  df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      putc ('\n', outf);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  fputs (";; Succ edge ", outf);
		  dump_edge_info (outf, e, 1);
		  fputc ('\n', outf);
		}
	    }
	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1657\f
f884e43f 1658void
4c9e08a4 1659update_br_prob_note (basic_block bb)
f884e43f 1660{
1661 rtx note;
6d7dc5b9 1662 if (!JUMP_P (BB_END (bb)))
f884e43f 1663 return;
5496dbfc 1664 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
f884e43f 1665 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1666 return;
1667 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1668}
5d65a39c 1669
1670/* Get the last insn associated with block BB (that includes barriers and
1671 tablejumps after BB). */
1672rtx
1673get_last_bb_insn (basic_block bb)
1674{
1675 rtx tmp;
1676 rtx end = BB_END (bb);
1677
1678 /* Include any jump table following the basic block. */
1679 if (tablejump_p (end, NULL, &tmp))
1680 end = tmp;
1681
1682 /* Include any barriers that may follow the basic block. */
1683 tmp = next_nonnote_insn (end);
1684 while (tmp && BARRIER_P (tmp))
1685 {
1686 end = tmp;
1687 tmp = next_nonnote_insn (end);
1688 }
1689
1690 return end;
1691}
f884e43f 1692\f
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).

   Returns non-zero when any inconsistency was diagnosed (each problem
   is also reported via error ()).  */

static int
rtl_verify_flow_info_1 (void)
{
  rtx x;
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      /* Every insn inside the block must point back at the block.  */
      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* cfglayout header/footer insns live outside any block, so their
	 BLOCK_FOR_INSN must be NULL (barriers have no such field).  */
      for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      /* The REG_BR_PROB note on a conditional jump must agree with the
	 probability recorded on the branch edge (unless there is no
	 profile at all).  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
	      && profile_status != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}
      /* Classify and count the outgoing edges.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    {
	      n_fallthru++, fallthru = e;
	      if ((e->flags & EDGE_CROSSING)
		  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
		      && e->src != ENTRY_BLOCK_PTR
		      && e->dest != EXIT_BLOCK_PTR))
		{
		  error ("fallthru edge crosses section boundary (bb %i)",
			 e->src->index);
		  err = 1;
		}
	    }

	  /* An edge with none but the "decorative" flags set is an
	     ordinary branch edge.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* Cross-check the edge counts against the block's last insn.  */
      if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
	  && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong amount of branch edges after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_call && !CALL_P (BB_END (bb)))
	{
	  error ("call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
	/* We may have a barrier inside a basic block before dead code
	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body: no stray basic-block notes and no control flow
	   insns before the block's final insn.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
5cc577b6 1923
028f8cc7 1924/* Verify the CFG and RTL consistency common for both underlying RTL and
1925 cfglayout RTL.
5cc577b6 1926
028f8cc7 1927 Currently it does following checks:
1928 - all checks of rtl_verify_flow_info_1
fcde5c9e 1929 - test head/end pointers
028f8cc7 1930 - check that all insns are in the basic blocks
1931 (except the switch handling code, barriers and notes)
1932 - check that all returns are followed by barriers
1933 - check that all fallthru edge points to the adjacent blocks. */
fcde5c9e 1934
028f8cc7 1935static int
4c9e08a4 1936rtl_verify_flow_info (void)
028f8cc7 1937{
1938 basic_block bb;
1939 int err = rtl_verify_flow_info_1 ();
1940 rtx x;
fcde5c9e 1941 rtx last_head = get_last_insn ();
1942 basic_block *bb_info;
028f8cc7 1943 int num_bb_notes;
1944 const rtx rtx_first = get_insns ();
1945 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
fcde5c9e 1946 const int max_uid = get_max_uid ();
1947
1948 bb_info = XCNEWVEC (basic_block, max_uid);
b36d64df 1949
028f8cc7 1950 FOR_EACH_BB_REVERSE (bb)
1951 {
1952 edge e;
cd665a06 1953 edge_iterator ei;
fcde5c9e 1954 rtx head = BB_HEAD (bb);
1955 rtx end = BB_END (bb);
cd665a06 1956
fcde5c9e 1957 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
39257eb5 1958 {
1959 /* Verify the end of the basic block is in the INSN chain. */
1960 if (x == end)
1961 break;
1962
1963 /* And that the code outside of basic blocks has NULL bb field. */
1964 if (!BARRIER_P (x)
1965 && BLOCK_FOR_INSN (x) != NULL)
1966 {
1967 error ("insn %d outside of basic blocks has non-NULL bb field",
1968 INSN_UID (x));
1969 err = 1;
1970 }
1971 }
fcde5c9e 1972
1973 if (!x)
1974 {
1975 error ("end insn %d for block %d not found in the insn stream",
1976 INSN_UID (end), bb->index);
1977 err = 1;
1978 }
1979
1980 /* Work backwards from the end to the head of the basic block
1981 to verify the head is in the RTL chain. */
1982 for (; x != NULL_RTX; x = PREV_INSN (x))
d5043f32 1983 {
fcde5c9e 1984 /* While walking over the insn chain, verify insns appear
1985 in only one basic block. */
1986 if (bb_info[INSN_UID (x)] != NULL)
1987 {
1988 error ("insn %d is in multiple basic blocks (%d and %d)",
1989 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1990 err = 1;
1991 }
1992
1993 bb_info[INSN_UID (x)] = bb;
1994
1995 if (x == head)
1996 break;
1997 }
1998 if (!x)
1999 {
2000 error ("head insn %d for block %d not found in the insn stream",
2001 INSN_UID (head), bb->index);
d5043f32 2002 err = 1;
2003 }
2004
39257eb5 2005 last_head = PREV_INSN (x);
fcde5c9e 2006
cd665a06 2007 FOR_EACH_EDGE (e, ei, bb->succs)
028f8cc7 2008 if (e->flags & EDGE_FALLTHRU)
2009 break;
2010 if (!e)
2011 {
2012 rtx insn;
2013
2014 /* Ensure existence of barrier in BB with no fallthru edges. */
6d7dc5b9 2015 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
028f8cc7 2016 insn = NEXT_INSN (insn))
2017 if (!insn
ad4583d9 2018 || NOTE_INSN_BASIC_BLOCK_P (insn))
028f8cc7 2019 {
2020 error ("missing barrier after block %i", bb->index);
2021 err = 1;
2022 break;
2023 }
2024 }
2025 else if (e->src != ENTRY_BLOCK_PTR
2026 && e->dest != EXIT_BLOCK_PTR)
a0c938f0 2027 {
028f8cc7 2028 rtx insn;
2029
2030 if (e->src->next_bb != e->dest)
2031 {
2032 error
2033 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2034 e->src->index, e->dest->index);
2035 err = 1;
2036 }
2037 else
5496dbfc 2038 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
028f8cc7 2039 insn = NEXT_INSN (insn))
ac7d3688 2040 if (BARRIER_P (insn) || INSN_P (insn))
028f8cc7 2041 {
2042 error ("verify_flow_info: Incorrect fallthru %i->%i",
2043 e->src->index, e->dest->index);
2044 fatal_insn ("wrong insn in the fallthru edge", insn);
2045 err = 1;
2046 }
a0c938f0 2047 }
028f8cc7 2048 }
b36d64df 2049
39257eb5 2050 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2051 {
2052 /* Check that the code before the first basic block has NULL
2053 bb field. */
2054 if (!BARRIER_P (x)
2055 && BLOCK_FOR_INSN (x) != NULL)
2056 {
2057 error ("insn %d outside of basic blocks has non-NULL bb field",
2058 INSN_UID (x));
2059 err = 1;
2060 }
2061 }
fcde5c9e 2062 free (bb_info);
2063
b36d64df 2064 num_bb_notes = 0;
4c26117a 2065 last_bb_seen = ENTRY_BLOCK_PTR;
2066
5cc577b6 2067 for (x = rtx_first; x; x = NEXT_INSN (x))
b36d64df 2068 {
2069 if (NOTE_INSN_BASIC_BLOCK_P (x))
2070 {
3c0a32c9 2071 bb = NOTE_BASIC_BLOCK (x);
5cc577b6 2072
b36d64df 2073 num_bb_notes++;
4c26117a 2074 if (bb != last_bb_seen->next_bb)
028f8cc7 2075 internal_error ("basic blocks not laid down consecutively");
b36d64df 2076
028f8cc7 2077 curr_bb = last_bb_seen = bb;
b36d64df 2078 }
2079
028f8cc7 2080 if (!curr_bb)
b36d64df 2081 {
2082 switch (GET_CODE (x))
2083 {
2084 case BARRIER:
2085 case NOTE:
2086 break;
2087
2088 case CODE_LABEL:
af6ed417 2089 /* An addr_vec is placed outside any basic block. */
b36d64df 2090 if (NEXT_INSN (x)
6d7dc5b9 2091 && JUMP_P (NEXT_INSN (x))
b36d64df 2092 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2093 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5cc577b6 2094 x = NEXT_INSN (x);
b36d64df 2095
2096 /* But in any case, non-deletable labels can appear anywhere. */
2097 break;
2098
2099 default:
cb8bacb6 2100 fatal_insn ("insn outside basic block", x);
b36d64df 2101 }
2102 }
2103
345f9e8c 2104 if (JUMP_P (x)
b36d64df 2105 && returnjump_p (x) && ! condjump_p (x)
8fcc0f14 2106 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
cb8bacb6 2107 fatal_insn ("return not followed by barrier", x);
5496dbfc 2108 if (curr_bb && x == BB_END (curr_bb))
028f8cc7 2109 curr_bb = NULL;
b36d64df 2110 }
2111
4d2e5d52 2112 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
b36d64df 2113 internal_error
b3d6de89 2114 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2115 num_bb_notes, n_basic_blocks);
b36d64df 2116
028f8cc7 2117 return err;
b36d64df 2118}
2119\f
4a82352a 2120/* Assume that the preceding pass has possibly eliminated jump instructions
b36d64df 2121 or converted the unconditional jumps. Eliminate the edges from CFG.
2122 Return true if any edges are eliminated. */
2123
2124bool
4c9e08a4 2125purge_dead_edges (basic_block bb)
b36d64df 2126{
cd665a06 2127 edge e;
5496dbfc 2128 rtx insn = BB_END (bb), note;
b36d64df 2129 bool purged = false;
cd665a06 2130 bool found;
2131 edge_iterator ei;
b36d64df 2132
11808932 2133 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
6d7dc5b9 2134 if (NONJUMP_INSN_P (insn)
11808932 2135 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2136 {
2137 rtx eqnote;
2138
2139 if (! may_trap_p (PATTERN (insn))
2140 || ((eqnote = find_reg_equal_equiv_note (insn))
2141 && ! may_trap_p (XEXP (eqnote, 0))))
2142 remove_note (insn, note);
2143 }
2144
f178f44b 2145 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
cd665a06 2146 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
f178f44b 2147 {
04166d08 2148 /* There are three types of edges we need to handle correctly here: EH
2149 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2150 latter can appear when nonlocal gotos are used. */
2151 if (e->flags & EDGE_EH)
f178f44b 2152 {
04166d08 2153 if (can_throw_internal (BB_END (bb))
2154 /* If this is a call edge, verify that this is a call insn. */
2155 && (! (e->flags & EDGE_ABNORMAL_CALL)
2156 || CALL_P (BB_END (bb))))
cd665a06 2157 {
2158 ei_next (&ei);
2159 continue;
2160 }
f178f44b 2161 }
04166d08 2162 else if (e->flags & EDGE_ABNORMAL_CALL)
f178f44b 2163 {
04166d08 2164 if (CALL_P (BB_END (bb))
2165 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2166 || INTVAL (XEXP (note, 0)) >= 0))
cd665a06 2167 {
2168 ei_next (&ei);
2169 continue;
2170 }
f178f44b 2171 }
2172 else
cd665a06 2173 {
2174 ei_next (&ei);
2175 continue;
2176 }
f178f44b 2177
2178 remove_edge (e);
3072d30e 2179 df_set_bb_dirty (bb);
f178f44b 2180 purged = true;
2181 }
5cc577b6 2182
6d7dc5b9 2183 if (JUMP_P (insn))
b36d64df 2184 {
2185 rtx note;
2186 edge b,f;
cd665a06 2187 edge_iterator ei;
5cc577b6 2188
b36d64df 2189 /* We do care only about conditional jumps and simplejumps. */
2190 if (!any_condjump_p (insn)
2191 && !returnjump_p (insn)
2192 && !simplejump_p (insn))
461eeb14 2193 return purged;
5cc577b6 2194
a2e42321 2195 /* Branch probability/prediction notes are defined only for
2196 condjumps. We've possibly turned condjump into simplejump. */
2197 if (simplejump_p (insn))
2198 {
2199 note = find_reg_note (insn, REG_BR_PROB, NULL);
2200 if (note)
2201 remove_note (insn, note);
2202 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2203 remove_note (insn, note);
2204 }
2205
cd665a06 2206 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
b36d64df 2207 {
4270b4b9 2208 /* Avoid abnormal flags to leak from computed jumps turned
2209 into simplejumps. */
db34a109 2210
a0fb7382 2211 e->flags &= ~EDGE_ABNORMAL;
4270b4b9 2212
14abf923 2213 /* See if this edge is one we should keep. */
2214 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2215 /* A conditional jump can fall through into the next
2216 block, so we should keep the edge. */
cd665a06 2217 {
2218 ei_next (&ei);
2219 continue;
2220 }
5cc577b6 2221 else if (e->dest != EXIT_BLOCK_PTR
5496dbfc 2222 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
14abf923 2223 /* If the destination block is the target of the jump,
2224 keep the edge. */
cd665a06 2225 {
2226 ei_next (&ei);
2227 continue;
2228 }
14abf923 2229 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2230 /* If the destination block is the exit block, and this
2231 instruction is a return, then keep the edge. */
cd665a06 2232 {
2233 ei_next (&ei);
2234 continue;
2235 }
14abf923 2236 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2237 /* Keep the edges that correspond to exceptions thrown by
1b64239d 2238 this instruction and rematerialize the EDGE_ABNORMAL
2239 flag we just cleared above. */
2240 {
2241 e->flags |= EDGE_ABNORMAL;
cd665a06 2242 ei_next (&ei);
1b64239d 2243 continue;
2244 }
5cc577b6 2245
14abf923 2246 /* We do not need this edge. */
3072d30e 2247 df_set_bb_dirty (bb);
b36d64df 2248 purged = true;
2249 remove_edge (e);
2250 }
5cc577b6 2251
cd665a06 2252 if (EDGE_COUNT (bb->succs) == 0 || !purged)
461eeb14 2253 return purged;
5cc577b6 2254
450d042a 2255 if (dump_file)
2256 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5cc577b6 2257
b36d64df 2258 if (!optimize)
2259 return purged;
2260
2261 /* Redistribute probabilities. */
ea091dfd 2262 if (single_succ_p (bb))
b36d64df 2263 {
ea091dfd 2264 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2265 single_succ_edge (bb)->count = bb->count;
db34a109 2266 }
b36d64df 2267 else
2268 {
2269 note = find_reg_note (insn, REG_BR_PROB, NULL);
2270 if (!note)
2271 return purged;
5cc577b6 2272
b36d64df 2273 b = BRANCH_EDGE (bb);
2274 f = FALLTHRU_EDGE (bb);
2275 b->probability = INTVAL (XEXP (note, 0));
2276 f->probability = REG_BR_PROB_BASE - b->probability;
2277 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2278 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2279 }
5cc577b6 2280
b36d64df 2281 return purged;
2282 }
6d7dc5b9 2283 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
bf4311e9 2284 {
2285 /* First, there should not be any EH or ABCALL edges resulting
2286 from non-local gotos and the like. If there were, we shouldn't
2287 have created the sibcall in the first place. Second, there
2288 should of course never have been a fallthru edge. */
ea091dfd 2289 gcc_assert (single_succ_p (bb));
2290 gcc_assert (single_succ_edge (bb)->flags
2291 == (EDGE_SIBCALL | EDGE_ABNORMAL));
bf4311e9 2292
2293 return 0;
2294 }
b36d64df 2295
b36d64df 2296 /* If we don't see a jump insn, we don't know exactly why the block would
2297 have been broken at this point. Look for a simple, non-fallthru edge,
2298 as these are only created by conditional branches. If we find such an
2299 edge we know that there used to be a jump here and can then safely
2300 remove all non-fallthru edges. */
cd665a06 2301 found = false;
2302 FOR_EACH_EDGE (e, ei, bb->succs)
2303 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2304 {
2305 found = true;
2306 break;
2307 }
5cc577b6 2308
cd665a06 2309 if (!found)
b36d64df 2310 return purged;
5cc577b6 2311
6e81a369 2312 /* Remove all but the fake and fallthru edges. The fake edge may be
2313 the only successor for this block in the case of noreturn
2314 calls. */
cd665a06 2315 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
b36d64df 2316 {
6e81a369 2317 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
461eeb14 2318 {
3072d30e 2319 df_set_bb_dirty (bb);
461eeb14 2320 remove_edge (e);
2321 purged = true;
2322 }
cd665a06 2323 else
2324 ei_next (&ei);
b36d64df 2325 }
5cc577b6 2326
ea091dfd 2327 gcc_assert (single_succ_p (bb));
5cc577b6 2328
ea091dfd 2329 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2330 single_succ_edge (bb)->count = bb->count;
b36d64df 2331
450d042a 2332 if (dump_file)
2333 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
b3d6de89 2334 bb->index);
b36d64df 2335 return purged;
2336}
2337
5cc577b6 2338/* Search all basic blocks for potentially dead edges and purge them. Return
2339 true if some edge has been eliminated. */
b36d64df 2340
2341bool
94ee50e8 2342purge_all_dead_edges (void)
b36d64df 2343{
4c26117a 2344 int purged = false;
4c26117a 2345 basic_block bb;
0922c912 2346
4c26117a 2347 FOR_EACH_BB (bb)
0922c912 2348 {
4c26117a 2349 bool purged_here = purge_dead_edges (bb);
5cc577b6 2350
0922c912 2351 purged |= purged_here;
0922c912 2352 }
5cc577b6 2353
b36d64df 2354 return purged;
2355}
1026363d 2356
2357/* Same as split_block but update cfg_layout structures. */
5f5d4cd1 2358
2359static basic_block
4c9e08a4 2360cfg_layout_split_block (basic_block bb, void *insnp)
1026363d 2361{
f780cc25 2362 rtx insn = (rtx) insnp;
5f5d4cd1 2363 basic_block new_bb = rtl_split_block (bb, insn);
1026363d 2364
bc5f266a 2365 new_bb->il.rtl->footer = bb->il.rtl->footer;
2366 bb->il.rtl->footer = NULL;
1026363d 2367
5f5d4cd1 2368 return new_bb;
1026363d 2369}
2370
1026363d 2371/* Redirect Edge to DEST. */
4ee9c684 2372static edge
4c9e08a4 2373cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
1026363d 2374{
2375 basic_block src = e->src;
4ee9c684 2376 edge ret;
1026363d 2377
c60fa3a7 2378 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4ee9c684 2379 return NULL;
c60fa3a7 2380
e5562ab8 2381 if (e->dest == dest)
4ee9c684 2382 return e;
c60fa3a7 2383
e5562ab8 2384 if (e->src != ENTRY_BLOCK_PTR
4ee9c684 2385 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
adee86a0 2386 {
3072d30e 2387 df_set_bb_dirty (src);
4ee9c684 2388 return ret;
adee86a0 2389 }
c60fa3a7 2390
2391 if (e->src == ENTRY_BLOCK_PTR
2392 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2393 {
450d042a 2394 if (dump_file)
2395 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
c60fa3a7 2396 e->src->index, dest->index);
2397
3072d30e 2398 df_set_bb_dirty (e->src);
c60fa3a7 2399 redirect_edge_succ (e, dest);
4ee9c684 2400 return e;
c60fa3a7 2401 }
2402
1026363d 2403 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2404 in the case the basic block appears to be in sequence. Avoid this
2405 transformation. */
2406
1026363d 2407 if (e->flags & EDGE_FALLTHRU)
2408 {
2409 /* Redirect any branch edges unified with the fallthru one. */
6d7dc5b9 2410 if (JUMP_P (BB_END (src))
b9de5542 2411 && label_is_jump_target_p (BB_HEAD (e->dest),
2412 BB_END (src)))
1026363d 2413 {
cc636d56 2414 edge redirected;
a0c938f0 2415
450d042a 2416 if (dump_file)
2417 fprintf (dump_file, "Fallthru edge unified with branch "
b9de5542 2418 "%i->%i redirected to %i\n",
2419 e->src->index, e->dest->index, dest->index);
2420 e->flags &= ~EDGE_FALLTHRU;
cc636d56 2421 redirected = redirect_branch_edge (e, dest);
2422 gcc_assert (redirected);
b9de5542 2423 e->flags |= EDGE_FALLTHRU;
3072d30e 2424 df_set_bb_dirty (e->src);
4ee9c684 2425 return e;
1026363d 2426 }
2427 /* In case we are redirecting fallthru edge to the branch edge
a0c938f0 2428 of conditional jump, remove it. */
cd665a06 2429 if (EDGE_COUNT (src->succs) == 2)
1026363d 2430 {
9ea57afb 2431 /* Find the edge that is different from E. */
2432 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
cd665a06 2433
1026363d 2434 if (s->dest == dest
5496dbfc 2435 && any_condjump_p (BB_END (src))
2436 && onlyjump_p (BB_END (src)))
2437 delete_insn (BB_END (src));
1026363d 2438 }
4ee9c684 2439 ret = redirect_edge_succ_nodup (e, dest);
450d042a 2440 if (dump_file)
2441 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
c60fa3a7 2442 e->src->index, e->dest->index, dest->index);
1026363d 2443 }
2444 else
c60fa3a7 2445 ret = redirect_branch_edge (e, dest);
1026363d 2446
2447 /* We don't want simplejumps in the insn stream during cfglayout. */
cc636d56 2448 gcc_assert (!simplejump_p (BB_END (src)));
1026363d 2449
3072d30e 2450 df_set_bb_dirty (src);
1026363d 2451 return ret;
2452}
2453
2454/* Simple wrapper as we always can redirect fallthru edges. */
2455static basic_block
4c9e08a4 2456cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
1026363d 2457{
cc636d56 2458 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2459
2460 gcc_assert (redirected);
1026363d 2461 return NULL;
2462}
2463
5f5d4cd1 2464/* Same as delete_basic_block but update cfg_layout structures. */
2465
1026363d 2466static void
4c9e08a4 2467cfg_layout_delete_block (basic_block bb)
1026363d 2468{
5496dbfc 2469 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
1026363d 2470
bc5f266a 2471 if (bb->il.rtl->header)
1026363d 2472 {
5496dbfc 2473 next = BB_HEAD (bb);
1026363d 2474 if (prev)
bc5f266a 2475 NEXT_INSN (prev) = bb->il.rtl->header;
1026363d 2476 else
bc5f266a 2477 set_first_insn (bb->il.rtl->header);
2478 PREV_INSN (bb->il.rtl->header) = prev;
2479 insn = bb->il.rtl->header;
1026363d 2480 while (NEXT_INSN (insn))
2481 insn = NEXT_INSN (insn);
2482 NEXT_INSN (insn) = next;
2483 PREV_INSN (next) = insn;
2484 }
5496dbfc 2485 next = NEXT_INSN (BB_END (bb));
bc5f266a 2486 if (bb->il.rtl->footer)
1026363d 2487 {
bc5f266a 2488 insn = bb->il.rtl->footer;
c60fa3a7 2489 while (insn)
2490 {
6d7dc5b9 2491 if (BARRIER_P (insn))
c60fa3a7 2492 {
2493 if (PREV_INSN (insn))
2494 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2495 else
bc5f266a 2496 bb->il.rtl->footer = NEXT_INSN (insn);
c60fa3a7 2497 if (NEXT_INSN (insn))
2498 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2499 }
6d7dc5b9 2500 if (LABEL_P (insn))
c60fa3a7 2501 break;
2502 insn = NEXT_INSN (insn);
2503 }
bc5f266a 2504 if (bb->il.rtl->footer)
c60fa3a7 2505 {
5496dbfc 2506 insn = BB_END (bb);
bc5f266a 2507 NEXT_INSN (insn) = bb->il.rtl->footer;
2508 PREV_INSN (bb->il.rtl->footer) = insn;
c60fa3a7 2509 while (NEXT_INSN (insn))
2510 insn = NEXT_INSN (insn);
2511 NEXT_INSN (insn) = next;
2512 if (next)
2513 PREV_INSN (next) = insn;
2514 else
2515 set_last_insn (insn);
2516 }
1026363d 2517 }
2518 if (bb->next_bb != EXIT_BLOCK_PTR)
bc5f266a 2519 to = &bb->next_bb->il.rtl->header;
1026363d 2520 else
2521 to = &cfg_layout_function_footer;
7a22afab 2522
1026363d 2523 rtl_delete_block (bb);
2524
2525 if (prev)
2526 prev = NEXT_INSN (prev);
4c9e08a4 2527 else
1026363d 2528 prev = get_insns ();
2529 if (next)
2530 next = PREV_INSN (next);
4c9e08a4 2531 else
1026363d 2532 next = get_last_insn ();
2533
2534 if (next && NEXT_INSN (next) != prev)
2535 {
2536 remaints = unlink_insn_chain (prev, next);
2537 insn = remaints;
2538 while (NEXT_INSN (insn))
2539 insn = NEXT_INSN (insn);
2540 NEXT_INSN (insn) = *to;
2541 if (*to)
2542 PREV_INSN (*to) = insn;
2543 *to = remaints;
2544 }
2545}
2546
139c3f48 2547/* Return true when blocks A and B can be safely merged. */
c60fa3a7 2548static bool
2549cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2550{
4f18499c 2551 /* If we are partitioning hot/cold basic blocks, we don't want to
2552 mess up unconditional or indirect jumps that cross between hot
7562ed74 2553 and cold sections.
2554
1118aef7 2555 Basic block partitioning may result in some jumps that appear to
a0c938f0 2556 be optimizable (or blocks that appear to be mergeable), but which really
2557 must be left untouched (they are required to make it safely across
2558 partition boundaries). See the comments at the top of
1118aef7 2559 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2560
1897b881 2561 if (BB_PARTITION (a) != BB_PARTITION (b))
7562ed74 2562 return false;
4f18499c 2563
c60fa3a7 2564 /* There must be exactly one edge in between the blocks. */
ea091dfd 2565 return (single_succ_p (a)
2566 && single_succ (a) == b
2567 && single_pred_p (b) == 1
cd665a06 2568 && a != b
c60fa3a7 2569 /* Must be simple edge. */
ea091dfd 2570 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
c60fa3a7 2571 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4aaec180 2572 /* If the jump insn has side effects, we can't kill the edge.
2573 When not optimizing, try_redirect_by_replacing_jump will
2574 not allow us to redirect an edge by replacing a table jump. */
6d7dc5b9 2575 && (!JUMP_P (BB_END (a))
4aaec180 2576 || ((!optimize || reload_completed)
5496dbfc 2577 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
c60fa3a7 2578}
2579
a53ff4c1 2580/* Merge block A and B. The blocks must be mergeable. */
2581
c60fa3a7 2582static void
2583cfg_layout_merge_blocks (basic_block a, basic_block b)
2584{
2585#ifdef ENABLE_CHECKING
cc636d56 2586 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
c60fa3a7 2587#endif
2588
3072d30e 2589 if (dump_file)
2590 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
2591
c60fa3a7 2592 /* If there was a CODE_LABEL beginning B, delete it. */
6d7dc5b9 2593 if (LABEL_P (BB_HEAD (b)))
7b07eae7 2594 {
2595 /* This might have been an EH label that no longer has incoming
2596 EH edges. Update data structures to match. */
2597 maybe_remove_eh_handler (BB_HEAD (b));
a0c938f0 2598
7b07eae7 2599 delete_insn (BB_HEAD (b));
2600 }
c60fa3a7 2601
2602 /* We should have fallthru edge in a, or we can do dummy redirection to get
2603 it cleaned up. */
6d7dc5b9 2604 if (JUMP_P (BB_END (a)))
cd665a06 2605 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
cc636d56 2606 gcc_assert (!JUMP_P (BB_END (a)));
c60fa3a7 2607
2608 /* Possible line number notes should appear in between. */
bc5f266a 2609 if (b->il.rtl->header)
c60fa3a7 2610 {
5496dbfc 2611 rtx first = BB_END (a), last;
c60fa3a7 2612
3072d30e 2613 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
39257eb5 2614 delete_insn_chain (NEXT_INSN (first), last, false);
bc5f266a 2615 b->il.rtl->header = NULL;
c60fa3a7 2616 }
2617
2618 /* In the case basic blocks are not adjacent, move them around. */
5496dbfc 2619 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
c60fa3a7 2620 {
5496dbfc 2621 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
c60fa3a7 2622
3072d30e 2623 emit_insn_after_noloc (first, BB_END (a), a);
c60fa3a7 2624 /* Skip possible DELETED_LABEL insn. */
2625 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2626 first = NEXT_INSN (first);
cc636d56 2627 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
5496dbfc 2628 BB_HEAD (b) = NULL;
c60fa3a7 2629 delete_insn (first);
2630 }
2631 /* Otherwise just re-associate the instructions. */
2632 else
2633 {
2634 rtx insn;
2635
5496dbfc 2636 for (insn = BB_HEAD (b);
2637 insn != NEXT_INSN (BB_END (b));
2638 insn = NEXT_INSN (insn))
3072d30e 2639 {
2640 set_block_for_insn (insn, a);
2641 df_insn_change_bb (insn);
2642 }
2643
5496dbfc 2644 insn = BB_HEAD (b);
c60fa3a7 2645 /* Skip possible DELETED_LABEL insn. */
2646 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2647 insn = NEXT_INSN (insn);
cc636d56 2648 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
5496dbfc 2649 BB_HEAD (b) = NULL;
2650 BB_END (a) = BB_END (b);
c60fa3a7 2651 delete_insn (insn);
2652 }
2653
3072d30e 2654 df_bb_delete (b->index);
2655
c60fa3a7 2656 /* Possible tablejumps and barriers should appear after the block. */
bc5f266a 2657 if (b->il.rtl->footer)
c60fa3a7 2658 {
bc5f266a 2659 if (!a->il.rtl->footer)
2660 a->il.rtl->footer = b->il.rtl->footer;
c60fa3a7 2661 else
2662 {
bc5f266a 2663 rtx last = a->il.rtl->footer;
c60fa3a7 2664
2665 while (NEXT_INSN (last))
2666 last = NEXT_INSN (last);
bc5f266a 2667 NEXT_INSN (last) = b->il.rtl->footer;
2668 PREV_INSN (b->il.rtl->footer) = last;
c60fa3a7 2669 }
bc5f266a 2670 b->il.rtl->footer = NULL;
c60fa3a7 2671 }
2672
450d042a 2673 if (dump_file)
2674 fprintf (dump_file, "Merged blocks %d and %d.\n",
c60fa3a7 2675 a->index, b->index);
c60fa3a7 2676}
2677
2678/* Split edge E. */
5f5d4cd1 2679
c60fa3a7 2680static basic_block
2681cfg_layout_split_edge (edge e)
2682{
c60fa3a7 2683 basic_block new_bb =
2684 create_basic_block (e->src != ENTRY_BLOCK_PTR
5496dbfc 2685 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
c60fa3a7 2686 NULL_RTX, e->src);
2687
3bdc4312 2688 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
c60fa3a7 2689 redirect_edge_and_branch_force (e, new_bb);
2690
2691 return new_bb;
2692}
2693
5f5d4cd1 2694/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2695
2696static void
2697rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2698{
2699}
2700
4ee9c684 2701/* Return 1 if BB ends with a call, possibly followed by some
2702 instructions that must stay with the call, 0 otherwise. */
2703
2704static bool
2705rtl_block_ends_with_call_p (basic_block bb)
2706{
2707 rtx insn = BB_END (bb);
2708
6d7dc5b9 2709 while (!CALL_P (insn)
4ee9c684 2710 && insn != BB_HEAD (bb)
2711 && keep_with_call_p (insn))
2712 insn = PREV_INSN (insn);
6d7dc5b9 2713 return (CALL_P (insn));
4ee9c684 2714}
2715
2716/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2717
2718static bool
2719rtl_block_ends_with_condjump_p (basic_block bb)
2720{
2721 return any_condjump_p (BB_END (bb));
2722}
2723
2724/* Return true if we need to add fake edge to exit.
2725 Helper function for rtl_flow_call_edges_add. */
2726
2727static bool
2728need_fake_edge_p (rtx insn)
2729{
2730 if (!INSN_P (insn))
2731 return false;
2732
6d7dc5b9 2733 if ((CALL_P (insn)
4ee9c684 2734 && !SIBLING_CALL_P (insn)
2735 && !find_reg_note (insn, REG_NORETURN, NULL)
4ee9c684 2736 && !CONST_OR_PURE_CALL_P (insn)))
2737 return true;
2738
2739 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2740 && MEM_VOLATILE_P (PATTERN (insn)))
2741 || (GET_CODE (PATTERN (insn)) == PARALLEL
2742 && asm_noperands (insn) != -1
2743 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2744 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2745}
2746
2747/* Add fake edges to the function exit for any non constant and non noreturn
2748 calls, volatile inline assembly in the bitmap of blocks specified by
2749 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
2750 that were split.
2751
2752 The goal is to expose cases in which entering a basic block does not imply
2753 that all subsequent instructions must be executed. */
2754
2755static int
2756rtl_flow_call_edges_add (sbitmap blocks)
2757{
2758 int i;
2759 int blocks_split = 0;
2760 int last_bb = last_basic_block;
2761 bool check_last_block = false;
2762
4d2e5d52 2763 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4ee9c684 2764 return 0;
2765
2766 if (! blocks)
2767 check_last_block = true;
2768 else
2769 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2770
2771 /* In the last basic block, before epilogue generation, there will be
2772 a fallthru edge to EXIT. Special care is required if the last insn
2773 of the last basic block is a call because make_edge folds duplicate
2774 edges, which would result in the fallthru edge also being marked
2775 fake, which would result in the fallthru edge being removed by
2776 remove_fake_edges, which would result in an invalid CFG.
2777
2778 Moreover, we can't elide the outgoing fake edge, since the block
2779 profiler needs to take this into account in order to solve the minimal
2780 spanning tree in the case that the call doesn't return.
2781
2782 Handle this by adding a dummy instruction in a new last basic block. */
2783 if (check_last_block)
2784 {
2785 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2786 rtx insn = BB_END (bb);
2787
2788 /* Back up past insns that must be kept in the same block as a call. */
2789 while (insn != BB_HEAD (bb)
2790 && keep_with_call_p (insn))
2791 insn = PREV_INSN (insn);
2792
2793 if (need_fake_edge_p (insn))
2794 {
2795 edge e;
2796
c6356c17 2797 e = find_edge (bb, EXIT_BLOCK_PTR);
2798 if (e)
2799 {
2800 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2801 commit_edge_insertions ();
2802 }
4ee9c684 2803 }
2804 }
2805
2806 /* Now add fake edges to the function exit for any non constant
2807 calls since there is no way that we can determine if they will
2808 return or not... */
2809
4d2e5d52 2810 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4ee9c684 2811 {
2812 basic_block bb = BASIC_BLOCK (i);
2813 rtx insn;
2814 rtx prev_insn;
2815
2816 if (!bb)
2817 continue;
2818
2819 if (blocks && !TEST_BIT (blocks, i))
2820 continue;
2821
2822 for (insn = BB_END (bb); ; insn = prev_insn)
2823 {
2824 prev_insn = PREV_INSN (insn);
2825 if (need_fake_edge_p (insn))
2826 {
2827 edge e;
2828 rtx split_at_insn = insn;
2829
2830 /* Don't split the block between a call and an insn that should
a0c938f0 2831 remain in the same block as the call. */
6d7dc5b9 2832 if (CALL_P (insn))
4ee9c684 2833 while (split_at_insn != BB_END (bb)
2834 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2835 split_at_insn = NEXT_INSN (split_at_insn);
2836
2837 /* The handling above of the final block before the epilogue
a0c938f0 2838 should be enough to verify that there is no edge to the exit
4ee9c684 2839 block in CFG already. Calling make_edge in such case would
2840 cause us to mark that edge as fake and remove it later. */
2841
2842#ifdef ENABLE_CHECKING
2843 if (split_at_insn == BB_END (bb))
cd665a06 2844 {
c6356c17 2845 e = find_edge (bb, EXIT_BLOCK_PTR);
2846 gcc_assert (e == NULL);
cd665a06 2847 }
4ee9c684 2848#endif
2849
2850 /* Note that the following may create a new basic block
2851 and renumber the existing basic blocks. */
2852 if (split_at_insn != BB_END (bb))
2853 {
2854 e = split_block (bb, split_at_insn);
2855 if (e)
2856 blocks_split++;
2857 }
2858
2859 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2860 }
2861
2862 if (insn == BB_HEAD (bb))
2863 break;
2864 }
2865 }
2866
2867 if (blocks_split)
2868 verify_flow_info ();
2869
2870 return blocks_split;
2871}
2872
c50ae675 2873/* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
d90dbd3e 2874 the conditional branch target, SECOND_HEAD should be the fall-thru
c50ae675 2875 there is no need to handle this here the loop versioning code handles
2876 this. the reason for SECON_HEAD is that it is needed for condition
2877 in trees, and this should be of the same type since it is a hook. */
2878static void
2879rtl_lv_add_condition_to_bb (basic_block first_head ,
a0c938f0 2880 basic_block second_head ATTRIBUTE_UNUSED,
2881 basic_block cond_bb, void *comp_rtx)
c50ae675 2882{
2883 rtx label, seq, jump;
2884 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2885 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2886 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2887 enum machine_mode mode;
2888
2889
2890 label = block_label (first_head);
2891 mode = GET_MODE (op0);
2892 if (mode == VOIDmode)
2893 mode = GET_MODE (op1);
2894
2895 start_sequence ();
2896 op0 = force_operand (op0, NULL_RTX);
2897 op1 = force_operand (op1, NULL_RTX);
2898 do_compare_rtx_and_jump (op0, op1, comp, 0,
2899 mode, NULL_RTX, NULL_RTX, label);
2900 jump = get_last_insn ();
2901 JUMP_LABEL (jump) = label;
2902 LABEL_NUSES (label)++;
2903 seq = get_insns ();
2904 end_sequence ();
2905
2906 /* Add the new cond , in the new head. */
2907 emit_insn_after(seq, BB_END(cond_bb));
2908}
2909
2910
2911/* Given a block B with unconditional branch at its end, get the
2912 store the return the branch edge and the fall-thru edge in
2913 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
2914static void
2915rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2916 edge *fallthru_edge)
2917{
2918 edge e = EDGE_SUCC (b, 0);
2919
2920 if (e->flags & EDGE_FALLTHRU)
2921 {
2922 *fallthru_edge = e;
2923 *branch_edge = EDGE_SUCC (b, 1);
2924 }
2925 else
2926 {
2927 *branch_edge = e;
2928 *fallthru_edge = EDGE_SUCC (b, 1);
2929 }
2930}
2931
e0dde8f8 2932void
2933init_rtl_bb_info (basic_block bb)
2934{
2935 gcc_assert (!bb->il.rtl);
f780cc25 2936 bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
e0dde8f8 2937}
2938
c50ae675 2939
2b74c150 2940/* Add EXPR to the end of basic block BB. */
2941
2942rtx
2943insert_insn_end_bb_new (rtx pat, basic_block bb)
2944{
2945 rtx insn = BB_END (bb);
2946 rtx new_insn;
2947 rtx pat_end = pat;
2948
2949 while (NEXT_INSN (pat_end) != NULL_RTX)
2950 pat_end = NEXT_INSN (pat_end);
2951
2952 /* If the last insn is a jump, insert EXPR in front [taking care to
2953 handle cc0, etc. properly]. Similarly we need to care trapping
2954 instructions in presence of non-call exceptions. */
2955
2956 if (JUMP_P (insn)
2957 || (NONJUMP_INSN_P (insn)
2958 && (!single_succ_p (bb)
2959 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2960 {
2961#ifdef HAVE_cc0
2962 rtx note;
2963#endif
2964 /* If this is a jump table, then we can't insert stuff here. Since
2965 we know the previous real insn must be the tablejump, we insert
2966 the new instruction just before the tablejump. */
2967 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2968 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2969 insn = prev_real_insn (insn);
2970
2971#ifdef HAVE_cc0
2972 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2973 if cc0 isn't set. */
2974 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2975 if (note)
2976 insn = XEXP (note, 0);
2977 else
2978 {
2979 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2980 if (maybe_cc0_setter
2981 && INSN_P (maybe_cc0_setter)
2982 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2983 insn = maybe_cc0_setter;
2984 }
2985#endif
2986 /* FIXME: What if something in cc0/jump uses value set in new
2987 insn? */
3072d30e 2988 new_insn = emit_insn_before_noloc (pat, insn, bb);
2b74c150 2989 }
2990
2991 /* Likewise if the last insn is a call, as will happen in the presence
2992 of exception handling. */
2993 else if (CALL_P (insn)
2994 && (!single_succ_p (bb)
2995 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
2996 {
2997 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
2998 we search backward and place the instructions before the first
2999 parameter is loaded. Do this for everyone for consistency and a
3000 presumption that we'll get better code elsewhere as well. */
3001
3002 /* Since different machines initialize their parameter registers
3003 in different orders, assume nothing. Collect the set of all
3004 parameter registers. */
3005 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3006
3007 /* If we found all the parameter loads, then we want to insert
3008 before the first parameter load.
3009
3010 If we did not find all the parameter loads, then we might have
3011 stopped on the head of the block, which could be a CODE_LABEL.
3012 If we inserted before the CODE_LABEL, then we would be putting
3013 the insn in the wrong basic block. In that case, put the insn
3014 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3015 while (LABEL_P (insn)
3016 || NOTE_INSN_BASIC_BLOCK_P (insn))
3017 insn = NEXT_INSN (insn);
3018
3072d30e 3019 new_insn = emit_insn_before_noloc (pat, insn, bb);
2b74c150 3020 }
3021 else
3072d30e 3022 new_insn = emit_insn_after_noloc (pat, insn, bb);
2b74c150 3023
3024 return new_insn;
3025}
3026
611d2ac1 3027/* Returns true if it is possible to remove edge E by redirecting
3028 it to the destination of the other edge from E->src. */
3029
3030static bool
3031rtl_can_remove_branch_p (edge e)
3032{
3033 basic_block src = e->src;
3034 basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3035 rtx insn = BB_END (src), set;
3036
3037 /* The conditions are taken from try_redirect_by_replacing_jump. */
3038 if (target == EXIT_BLOCK_PTR)
3039 return false;
3040
3041 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3042 return false;
3043
3044 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3045 || BB_PARTITION (src) != BB_PARTITION (target))
3046 return false;
3047
3048 if (!onlyjump_p (insn)
3049 || tablejump_p (insn, NULL, NULL))
3050 return false;
3051
3052 set = single_set (insn);
3053 if (!set || side_effects_p (set))
3054 return false;
3055
3056 return true;
3057}
3058
1026363d 3059/* Implementation of CFG manipulation for linearized RTL. */
3060struct cfg_hooks rtl_cfg_hooks = {
5f5d4cd1 3061 "rtl",
1026363d 3062 rtl_verify_flow_info,
028f8cc7 3063 rtl_dump_bb,
c60fa3a7 3064 rtl_create_basic_block,
1026363d 3065 rtl_redirect_edge_and_branch,
3066 rtl_redirect_edge_and_branch_force,
611d2ac1 3067 rtl_can_remove_branch_p,
1026363d 3068 rtl_delete_block,
3069 rtl_split_block,
5f5d4cd1 3070 rtl_move_block_after,
c60fa3a7 3071 rtl_can_merge_blocks, /* can_merge_blocks_p */
3072 rtl_merge_blocks,
4ee9c684 3073 rtl_predict_edge,
3074 rtl_predicted_by_p,
3075 NULL, /* can_duplicate_block_p */
3076 NULL, /* duplicate_block */
5f5d4cd1 3077 rtl_split_edge,
3078 rtl_make_forwarder_block,
4ee9c684 3079 rtl_tidy_fallthru_edge,
3080 rtl_block_ends_with_call_p,
3081 rtl_block_ends_with_condjump_p,
26b12c91 3082 rtl_flow_call_edges_add,
3083 NULL, /* execute_on_growing_pred */
c50ae675 3084 NULL, /* execute_on_shrinking_pred */
3085 NULL, /* duplicate loop for trees */
3086 NULL, /* lv_add_condition_to_bb */
3087 NULL, /* lv_adjust_loop_header_phi*/
3088 NULL, /* extract_cond_bb_edges */
a0c938f0 3089 NULL /* flush_pending_stmts */
1026363d 3090};
3091
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

/* We do not want to declare these functions in a header file, since they
   should only be used through the cfghooks interface, and we do not want to
   move them here since it would require also moving quite a lot of related
   code.  They are in cfglayout.c.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);
extern basic_block cfg_layout_duplicate_bb (basic_block);

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,	/* verify_flow_info */
  rtl_dump_bb,			/* dump_bb */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,	/* can_remove_branch_p */
  cfg_layout_delete_block,	/* delete_basic_block */
  cfg_layout_split_block,	/* split_block */
  rtl_move_block_after,		/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,	/* merge_blocks */
  rtl_predict_edge,		/* predict_edge */
  rtl_predicted_by_p,		/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,	/* duplicate_block */
  cfg_layout_split_edge,	/* split_edge */
  rtl_make_forwarder_block,	/* make_forwarder_block */
  NULL,				/* tidy_fallthru_edge */
  rtl_block_ends_with_call_p,	/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,	/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL /* flush_pending_stmts */
};