]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
2012-11-01 Sharad Singhai <singhai@google.com>
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
b36d64df 1/* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
d7ff3ab6 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
b36d64df 5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
b36d64df 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
b36d64df 21
5cc577b6 22/* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
b36d64df 24
25 Available functionality:
c60fa3a7 26 - Basic CFG/RTL manipulation API documented in cfghooks.h
5cc577b6 27 - CFG-aware instruction chain manipulation
b36d64df 28 delete_insn, delete_insn_chain
c60fa3a7 29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
54f21e20 33 - CFG fixing after coarse manipulation
34 fixup_abnormal_edges
c60fa3a7 35
36 Functions not supposed for generic use:
5cc577b6 37 - Infrastructure to determine quickly basic block for insn
b36d64df 38 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5cc577b6 39 - Edge redirection with updating and optimizing of insn chain
c60fa3a7 40 block_label, tidy_fallthru_edge, force_nonfallthru */
b36d64df 41\f
42#include "config.h"
43#include "system.h"
805e22b2 44#include "coretypes.h"
45#include "tm.h"
b36d64df 46#include "tree.h"
b36d64df 47#include "hard-reg-set.h"
48#include "basic-block.h"
49#include "regs.h"
50#include "flags.h"
b36d64df 51#include "function.h"
52#include "except.h"
d7091a76 53#include "rtl-error.h"
b36d64df 54#include "tm_p.h"
55#include "obstack.h"
225e5f6f 56#include "insn-attr.h"
26fb1781 57#include "insn-config.h"
804e497b 58#include "expr.h"
065efcb1 59#include "target.h"
218e3e4e 60#include "common/common-target.h"
c50ae675 61#include "cfgloop.h"
e0dde8f8 62#include "ggc.h"
77fce4cd 63#include "tree-pass.h"
3072d30e 64#include "df.h"
b36d64df 65
23a070f3 66/* Holds the interesting leading and trailing notes for the function.
67 Only applicable if the CFG is in cfglayout mode. */
68static GTY(()) rtx cfg_layout_function_footer;
69static GTY(()) rtx cfg_layout_function_header;
70
71static rtx skip_insns_after_block (basic_block);
72static void record_effective_endpoints (void);
73static rtx label_for_bb (basic_block);
74static void fixup_reorder_chain (void);
75
76void verify_insn_chain (void);
77static void fixup_fallthru_exit_predecessor (void);
5493cb9a 78static int can_delete_note_p (const_rtx);
79static int can_delete_label_p (const_rtx);
4c9e08a4 80static basic_block rtl_split_edge (edge);
5f5d4cd1 81static bool rtl_move_block_after (basic_block, basic_block);
4c9e08a4 82static int rtl_verify_flow_info (void);
5f5d4cd1 83static basic_block cfg_layout_split_block (basic_block, void *);
4ee9c684 84static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
4c9e08a4 85static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
86static void cfg_layout_delete_block (basic_block);
87static void rtl_delete_block (basic_block);
88static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
4ee9c684 89static edge rtl_redirect_edge_and_branch (edge, basic_block);
5f5d4cd1 90static basic_block rtl_split_block (basic_block, void *);
5147ec07 91static void rtl_dump_bb (FILE *, basic_block, int, int);
4c9e08a4 92static int rtl_verify_flow_info_1 (void);
5f5d4cd1 93static void rtl_make_forwarder_block (edge);
b36d64df 94\f
95/* Return true if NOTE is not one of the ones that must be kept paired,
5cc577b6 96 so that we may simply delete it. */
b36d64df 97
98static int
5493cb9a 99can_delete_note_p (const_rtx note)
b36d64df 100{
25e880b1 101 switch (NOTE_KIND (note))
102 {
103 case NOTE_INSN_DELETED:
104 case NOTE_INSN_BASIC_BLOCK:
105 case NOTE_INSN_EPILOGUE_BEG:
106 return true;
107
108 default:
109 return false;
110 }
b36d64df 111}
112
113/* True if a given label can be deleted. */
114
115static int
5493cb9a 116can_delete_label_p (const_rtx label)
b36d64df 117{
5cc577b6 118 return (!LABEL_PRESERVE_P (label)
119 /* User declared labels must be preserved. */
120 && LABEL_NAME (label) == 0
2f138c1c 121 && !in_expr_list_p (forced_labels, label));
b36d64df 122}
123
/* Delete INSN by patching it out.
   Labels that cannot be removed are converted to NOTE_INSN_DELETED_LABEL
   notes instead; label use counts referenced by INSN are decremented.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the label headed its basic block, move it behind the
	     basic-block note so the note becomes the new block head.  */
	  if (bb_note != NULL_RTX && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && BLOCK_FOR_INSN (bb_note) == bb)
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
214
fb20d6fa 215/* Like delete_insn but also purge dead edges from BB. */
ebc94641 216
a2b85e40 217void
4c9e08a4 218delete_insn_and_edges (rtx insn)
fb20d6fa 219{
fb20d6fa 220 bool purge = false;
221
ab87d1bc 222 if (INSN_P (insn)
fb20d6fa 223 && BLOCK_FOR_INSN (insn)
5496dbfc 224 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
fb20d6fa 225 purge = true;
a2b85e40 226 delete_insn (insn);
fb20d6fa 227 if (purge)
228 purge_dead_edges (BLOCK_FOR_INSN (insn));
fb20d6fa 229}
230
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx prev, current;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  Walk backwards from FINISH so that deleting CURRENT never
     disturbs the links we still have to visit.  */
  current = finish;
  while (1)
    {
      prev = PREV_INSN (current);
      /* Paired notes survive the deletion; everything else goes.  */
      if (NOTE_P (current) && !can_delete_note_p (current))
	;
      else
	delete_insn (current);

      /* Surviving insns may need their bb association severed, e.g. when
	 the containing block itself is being deleted.  */
      if (clear_bb && !INSN_DELETED_P (current))
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
260\f
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes right after the leading label.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the bb note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
340
/* Create new basic block consisting of instructions in between HEAD and END
   and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create basic block at the end of INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = (rtx) headp, end = (rtx) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
    {
      /* Grow by 25% to amortize repeated insertions.  */
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  /* Clear the "already used" tag left by create_basic_block_structure.  */
  bb->aux = NULL;
  return bb;
}
c60fa3a7 365
366static basic_block
367cfg_layout_create_basic_block (void *head, void *end, basic_block after)
368{
369 basic_block newbb = rtl_create_basic_block (head, end, after);
370
c60fa3a7 371 return newbb;
372}
b36d64df 373\f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  /* END includes any trailing barrier/table belonging to the block.  */
  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
403\f
f23d9a22 404/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
b36d64df 405
406void
4c9e08a4 407compute_bb_for_insn (void)
b36d64df 408{
4c26117a 409 basic_block bb;
b36d64df 410
4c26117a 411 FOR_EACH_BB (bb)
b36d64df 412 {
5496dbfc 413 rtx end = BB_END (bb);
5cc577b6 414 rtx insn;
b36d64df 415
5496dbfc 416 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
b36d64df 417 {
ab87d1bc 418 BLOCK_FOR_INSN (insn) = bb;
b36d64df 419 if (insn == end)
420 break;
b36d64df 421 }
422 }
423}
424
/* Release the basic_block_for_insn array.
   Returns 0 so it can double as a pass execute callback.  */

unsigned int
free_bb_for_insn (void)
{
  rtx insn;

  /* Barriers never belong to a basic block, so leave them alone.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}
436
/* Execute callback for pass_free_cfg: tear down per-insn CFG data.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
453
/* Pass descriptor: destroys PROP_cfg by releasing per-insn basic-block
   information (see rest_of_pass_free_cfg).  */
struct rtl_opt_pass pass_free_cfg =
{
 {
  RTL_PASS,
  "*free_cfg",                          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_pass_free_cfg,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};
473
a0fee14a 474/* Return RTX to emit after when we want to emit code on the entry of function. */
475rtx
476entry_of_function (void)
477{
a0c938f0 478 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
4d2e5d52 479 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
a0fee14a 480}
481
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
  edge e = ei_safe_edge (ei);

  /* The entry block always falls through into the function body.  */
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
494
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated.  */

static void
update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
{
  rtx insn;

  /* Bump END past the last insn so the loop below is inclusive.  */
  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}
f41d4683 510
/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
519
4a020a8c 520\f
/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const_rtx insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}
542
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !single_succ_p (bb))
    return false;

  /* Every insn before the block's last must be inactive ...  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* ... and the last may only be a simple jump or itself inactive.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
563
/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      /* BB itself must not be a loop header ...  */
      if (bb->loop_father->header == bb)
	return false;
      /* ... nor may its destination be one (BB would be a preheader).  */
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
586
587/* Return nonzero if we can reach target from src by falling through. */
588/* FIXME: Make this a cfg hook. */
589
590bool
591can_fallthru (basic_block src, basic_block target)
592{
593 rtx insn = BB_END (src);
594 rtx insn2;
595 edge e;
596 edge_iterator ei;
597
598 if (target == EXIT_BLOCK_PTR)
599 return true;
600 if (src->next_bb != target)
601 return 0;
602 FOR_EACH_EDGE (e, ei, src->succs)
603 if (e->dest == EXIT_BLOCK_PTR
604 && e->flags & EDGE_FALLTHRU)
605 return 0;
606
607 insn2 = BB_HEAD (target);
608 if (insn2 && !active_insn_p (insn2))
609 insn2 = next_active_insn (insn2);
610
611 /* ??? Later we may add code to move jump tables offline. */
612 return next_active_insn (insn) == insn2;
613}
614
615/* Return nonzero if we could reach target from src by falling through,
616 if the target was made adjacent. If we already have a fall-through
617 edge to the exit block, we can't do that. */
618static bool
619could_fall_through (basic_block src, basic_block target)
620{
621 edge e;
622 edge_iterator ei;
623
624 if (target == EXIT_BLOCK_PTR)
625 return true;
626 FOR_EACH_EDGE (e, ei, src->succs)
627 if (e->dest == EXIT_BLOCK_PTR
628 && e->flags & EDGE_FALLTHRU)
629 return 0;
630 return true;
631}
b36d64df 632\f
018f0595 633/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
634rtx
635bb_note (basic_block bb)
636{
637 rtx note;
638
639 note = BB_HEAD (bb);
640 if (LABEL_P (note))
641 note = NEXT_INSN (note);
642
643 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
644 return note;
645}
646
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx
first_insn_after_basic_block_note (basic_block block)
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL_RTX;
  /* Skip a leading label; the bb note must follow.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
666
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      /* NULL means split right after the bb note.  */
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
728
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  /* Find the last nondebug insn of A carrying a location.  */
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      /* Find the first nondebug insn of B.  */
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  /* Neither neighbour carries the locus: the edge is its only holder.  */
  return true;
}
765
/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  /* The nop carries the edge's goto_locus so the source location
     survives in the insn stream.  */
  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}
778
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Set B's nondebug end aside; [b_debug_start, b_debug_end] will be the
     trailing debug insns, handled separately below.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
c60fa3a7 899
3072d30e 900
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
b36d64df 938\f
5cc577b6 939/* Return the label in the head of basic block BLOCK. Create one if it doesn't
940 exist. */
b36d64df 941
942rtx
4c9e08a4 943block_label (basic_block block)
b36d64df 944{
945 if (block == EXIT_BLOCK_PTR)
946 return NULL_RTX;
5cc577b6 947
6d7dc5b9 948 if (!LABEL_P (BB_HEAD (block)))
b36d64df 949 {
5496dbfc 950 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
b36d64df 951 }
5cc577b6 952
5496dbfc 953 return BB_HEAD (block);
b36d64df 954}
955
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block TARGET.  IN_CFGLAYOUT is true when operating in cfglayout mode,
   where fallthru edges need no physical adjacency and the block footer
   (barriers/labels after the block) must be maintained by hand.
   The parameters and return values are equivalent to
   redirect_edge_and_branch: the surviving edge on success, NULL on
   failure.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  The inner
	 EDGE_SUCC expression picks the "other" edge: index 1 when
	 edge 0 is E, index 0 otherwise.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  /* Tablejumps can only be rewritten when we may still create new
     jump tables; after reload (or when not optimizing) leave them
     alone.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* NOTE(review): this INSN deliberately shadows the outer one;
	     it walks the footer chain of SRC, not the insn stream.  */
	  rtx insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* block_label returns NULL_RTX only for the exit block, which
	     is the one case redirect_jump may refuse.  */
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.
	 NOTE(review): INSN was unlinked just above, but its pattern is
	 still allocated and readable, which is all tablejump_p needs.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Unlink NEW_INSN from its current position ...  */
	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ... and relink it immediately before BARRIER.  */
	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The surviving edge is now unconditional.  */
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1134
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached the block whose label is OLD_LABEL.
   Return true if this worked or the original target wasn't OLD_LABEL,
   return false if redirection doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      /* A jump table cannot dispatch to the exit block.  */
      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      /* TMP is the ADDR_VEC/ADDR_DIFF_VEC insn; the label vector is
	 operand 0 for ADDR_VEC and operand 1 for ADDR_DIFF_VEC.  */
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  The default label lives in
	 operand 2 of the IF_THEN_ELSE.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: rewrite every operand label that matches, then
	 fix up JUMP_LABEL and the REG_LABEL_TARGET/OPERAND notes.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* NEW_LABEL is now the primary target; a duplicate
	     REG_LABEL_TARGET note for it would be redundant.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR);
	      return false;
	    }
	}
    }
  return true;
}
1249
1250
1251/* Redirect edge representing branch of (un)conditional jump or tablejump,
1252 NULL on failure */
1253static edge
1254redirect_branch_edge (edge e, basic_block target)
1255{
1256 rtx old_label = BB_HEAD (e->dest);
1257 basic_block src = e->src;
1258 rtx insn = BB_END (src);
1259
1260 /* We can only redirect non-fallthru edges of jump insn. */
1261 if (e->flags & EDGE_FALLTHRU)
1262 return NULL;
1263 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1264 return NULL;
1265
1266 if (!currently_expanding_to_rtl)
1267 {
1268 if (!patch_jump_insn (insn, old_label, target))
1269 return NULL;
1270 }
1271 else
1272 /* When expanding this BB might actually contain multiple
1273 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1274 Redirect all of those that match our label. */
50b15e04 1275 FOR_BB_INSNS (src, insn)
a8dd994c 1276 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1277 return NULL;
b36d64df 1278
450d042a 1279 if (dump_file)
1280 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
b3d6de89 1281 e->src->index, e->dest->index, target->index);
5cc577b6 1282
b36d64df 1283 if (e->dest != target)
4ee9c684 1284 e = redirect_edge_succ_nodup (e, target);
3072d30e 1285
4ee9c684 1286 return e;
c60fa3a7 1287}
1288
1289/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1290 expense of adding new instructions or reordering basic blocks.
1291
1292 Function can be also called with edge destination equivalent to the TARGET.
1293 Then it should try the simplifications and do nothing if none is possible.
1294
4ee9c684 1295 Return edge representing the branch if transformation succeeded. Return NULL
1296 on failure.
1297 We still return NULL in case E already destinated TARGET and we didn't
1298 managed to simplify instruction stream. */
c60fa3a7 1299
4ee9c684 1300static edge
f82b06e0 1301rtl_redirect_edge_and_branch (edge e, basic_block target)
c60fa3a7 1302{
4ee9c684 1303 edge ret;
adee86a0 1304 basic_block src = e->src;
1305
c60fa3a7 1306 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4ee9c684 1307 return NULL;
c60fa3a7 1308
e5562ab8 1309 if (e->dest == target)
4ee9c684 1310 return e;
e5562ab8 1311
4ee9c684 1312 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
adee86a0 1313 {
3072d30e 1314 df_set_bb_dirty (src);
4ee9c684 1315 return ret;
adee86a0 1316 }
c60fa3a7 1317
4ee9c684 1318 ret = redirect_branch_edge (e, target);
1319 if (!ret)
1320 return NULL;
5cc577b6 1321
3072d30e 1322 df_set_bb_dirty (src);
4ee9c684 1323 return ret;
b36d64df 1324}
1325
20dd417a 1326/* Like force_nonfallthru below, but additionally performs redirection
9cb2517e 1327 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1328 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1329 simple_return_rtx, indicating which kind of returnjump to create.
1330 It should be NULL otherwise. */
b36d64df 1331
4115ac36 1332basic_block
9cb2517e 1333force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
b36d64df 1334{
498d173d 1335 basic_block jump_block, new_bb = NULL, src = e->src;
b36d64df 1336 rtx note;
1337 edge new_edge;
498d173d 1338 int abnormal_edge_flags = 0;
d7ff3ab6 1339 bool asm_goto_edge = false;
9c388755 1340 int loc;
b36d64df 1341
accd6c44 1342 /* In the case the last instruction is conditional jump to the next
1343 instruction, first redirect the jump itself and then continue
df07c3ae 1344 by creating a basic block afterwards to redirect fallthru edge. */
accd6c44 1345 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
5496dbfc 1346 && any_condjump_p (BB_END (e->src))
5496dbfc 1347 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
accd6c44 1348 {
1349 rtx note;
d9aa52be 1350 edge b = unchecked_make_edge (e->src, target, 0);
cc636d56 1351 bool redirected;
accd6c44 1352
cc636d56 1353 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1354 gcc_assert (redirected);
a0c938f0 1355
5496dbfc 1356 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
accd6c44 1357 if (note)
1358 {
1359 int prob = INTVAL (XEXP (note, 0));
1360
1361 b->probability = prob;
1362 b->count = e->count * prob / REG_BR_PROB_BASE;
1363 e->probability -= e->probability;
1364 e->count -= b->count;
1365 if (e->probability < 0)
1366 e->probability = 0;
1367 if (e->count < 0)
1368 e->count = 0;
1369 }
1370 }
1371
b36d64df 1372 if (e->flags & EDGE_ABNORMAL)
498d173d 1373 {
1374 /* Irritating special case - fallthru edge to the same block as abnormal
1375 edge.
1376 We can't redirect abnormal edge, but we still can split the fallthru
4c9e08a4 1377 one and create separate abnormal edge to original destination.
498d173d 1378 This allows bb-reorder to make such edge non-fallthru. */
cc636d56 1379 gcc_assert (e->dest == target);
4ecbba1a 1380 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1381 e->flags &= EDGE_FALLTHRU;
498d173d 1382 }
cc636d56 1383 else
76cc9616 1384 {
cc636d56 1385 gcc_assert (e->flags & EDGE_FALLTHRU);
1386 if (e->src == ENTRY_BLOCK_PTR)
1387 {
1388 /* We can't redirect the entry block. Create an empty block
cd665a06 1389 at the start of the function which we use to add the new
1390 jump. */
1391 edge tmp;
1392 edge_iterator ei;
1393 bool found = false;
a0c938f0 1394
cd665a06 1395 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
a0c938f0 1396
cc636d56 1397 /* Change the existing edge's source to be the new block, and add
1398 a new edge from the entry block to the new block. */
1399 e->src = bb;
cd665a06 1400 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1401 {
1402 if (tmp == e)
1403 {
55b71d24 1404 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
cd665a06 1405 found = true;
1406 break;
1407 }
1408 else
1409 ei_next (&ei);
1410 }
a0c938f0 1411
cd665a06 1412 gcc_assert (found);
a0c938f0 1413
046bfc77 1414 VEC_safe_push (edge, gc, bb->succs, e);
cc636d56 1415 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1416 }
76cc9616 1417 }
1418
d7ff3ab6 1419 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
92f67f6e 1420 don't point to the target or fallthru label. */
d7ff3ab6 1421 if (JUMP_P (BB_END (e->src))
1422 && target != EXIT_BLOCK_PTR
d7ff3ab6 1423 && (e->flags & EDGE_FALLTHRU)
1424 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
b36d64df 1425 {
d7ff3ab6 1426 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1427
1428 for (i = 0; i < n; ++i)
92f67f6e 1429 {
1430 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1431 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1432 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
d7ff3ab6 1433 asm_goto_edge = true;
92f67f6e 1434 }
d7ff3ab6 1435 }
1436
1437 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1438 {
1439 gcov_type count = e->count;
1440 int probability = e->probability;
b36d64df 1441 /* Create the new structures. */
3160014e 1442
7a1fee99 1443 /* If the old block ended with a tablejump, skip its table
1444 by searching forward from there. Otherwise start searching
1445 forward from the last instruction of the old block. */
5496dbfc 1446 if (!tablejump_p (BB_END (e->src), NULL, &note))
1447 note = BB_END (e->src);
3160014e 1448 note = NEXT_INSN (note);
1449
3160014e 1450 jump_block = create_basic_block (note, NULL, e->src);
d7ff3ab6 1451 jump_block->count = count;
b36d64df 1452 jump_block->frequency = EDGE_FREQUENCY (e);
b36d64df 1453
4f18499c 1454 /* Make sure new block ends up in correct hot/cold section. */
1455
7562ed74 1456 BB_COPY_PARTITION (jump_block, e->src);
065efcb1 1457 if (flag_reorder_blocks_and_partition
218e3e4e 1458 && targetm_common.have_named_sections
1897b881 1459 && JUMP_P (BB_END (jump_block))
1460 && !any_condjump_p (BB_END (jump_block))
1461 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
a1ddb869 1462 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
a0c938f0 1463
b36d64df 1464 /* Wire edge in. */
1465 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
d7ff3ab6 1466 new_edge->probability = probability;
1467 new_edge->count = count;
b36d64df 1468
1469 /* Redirect old edge. */
1470 redirect_edge_pred (e, jump_block);
1471 e->probability = REG_BR_PROB_BASE;
1472
d7ff3ab6 1473 /* If asm goto has any label refs to target's label,
1474 add also edge from asm goto bb to target. */
1475 if (asm_goto_edge)
1476 {
1477 new_edge->probability /= 2;
1478 new_edge->count /= 2;
1479 jump_block->count /= 2;
1480 jump_block->frequency /= 2;
1481 new_edge = make_edge (new_edge->src, target,
1482 e->flags & ~EDGE_FALLTHRU);
1483 new_edge->probability = probability - probability / 2;
1484 new_edge->count = count - count / 2;
1485 }
1486
b36d64df 1487 new_bb = jump_block;
1488 }
1489 else
1490 jump_block = e->src;
5cc577b6 1491
8e7408e3 1492 loc = e->goto_locus;
b36d64df 1493 e->flags &= ~EDGE_FALLTHRU;
1494 if (target == EXIT_BLOCK_PTR)
1495 {
9cb2517e 1496 if (jump_label == ret_rtx)
1497 {
38c95e2f 1498#ifdef HAVE_return
9cb2517e 1499 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
38c95e2f 1500#else
9cb2517e 1501 gcc_unreachable ();
38c95e2f 1502#endif
9cb2517e 1503 }
1504 else
1505 {
1506 gcc_assert (jump_label == simple_return_rtx);
1507#ifdef HAVE_simple_return
1508 emit_jump_insn_after_setloc (gen_simple_return (),
1509 BB_END (jump_block), loc);
1510#else
1511 gcc_unreachable ();
1512#endif
1513 }
31a53363 1514 set_return_jump_label (BB_END (jump_block));
b36d64df 1515 }
1516 else
1517 {
1518 rtx label = block_label (target);
9c388755 1519 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
5496dbfc 1520 JUMP_LABEL (BB_END (jump_block)) = label;
b36d64df 1521 LABEL_NUSES (label)++;
1522 }
5cc577b6 1523
5496dbfc 1524 emit_barrier_after (BB_END (jump_block));
b36d64df 1525 redirect_edge_succ_nodup (e, target);
1526
498d173d 1527 if (abnormal_edge_flags)
1528 make_edge (src, target, abnormal_edge_flags);
1529
48e1416a 1530 df_mark_solutions_dirty ();
b36d64df 1531 return new_bb;
1532}
1533
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  /* Keep the destination unchanged; JUMP_LABEL is NULL_RTX because no
     redirection to the exit block is requested here.  */
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
1543
1544/* Redirect edge even at the expense of creating new jump insn or
1545 basic block. Return new basic block if created, NULL otherwise.
a53ff4c1 1546 Conversion must be possible. */
b36d64df 1547
1026363d 1548static basic_block
4c9e08a4 1549rtl_redirect_edge_and_branch_force (edge e, basic_block target)
b36d64df 1550{
5cc577b6 1551 if (redirect_edge_and_branch (e, target)
1552 || e->dest == target)
b36d64df 1553 return NULL;
1554
1555 /* In case the edge redirection failed, try to force it to be non-fallthru
1556 and redirect newly created simplejump. */
3072d30e 1557 df_set_bb_dirty (e->src);
9cb2517e 1558 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
b36d64df 1559}
1560
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.
     Any real insn in between means B does not fall through to C.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Step back so the deletion below starts at the jump (and the
	 cc0 setter, if any) itself.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence from Q (exclusive) up to the head
     of C (exclusive).  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
b36d64df 1609\f
/* Should move basic block BB after basic block AFTER.  NIY.
   This is the rtl cfg-hook stub: moving blocks is not implemented at
   this level, so report failure unconditionally.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
1618
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  The exit block has no head insn, so
     fall back to appending at the end of the insn chain.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR)
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  BEFORE still holds the old head label of
	     EDGE_IN->dest here.  */
	  rtx last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1695
1696/* Queue instructions for insertion on an edge between two basic blocks.
1697 The new instructions and basic blocks (if any) will not appear in the
1698 CFG until commit_edge_insertions is called. */
1699
1700void
4c9e08a4 1701insert_insn_on_edge (rtx pattern, edge e)
b36d64df 1702{
1703 /* We cannot insert instructions on an abnormal critical edge.
1704 It will be easier to find the culprit if we die now. */
cc636d56 1705 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
b36d64df 1706
4ee9c684 1707 if (e->insns.r == NULL_RTX)
b36d64df 1708 start_sequence ();
1709 else
4ee9c684 1710 push_to_sequence (e->insns.r);
b36d64df 1711
1712 emit_insn (pattern);
1713
4ee9c684 1714 e->insns.r = get_insns ();
b36d64df 1715 end_sequence ();
1716}
1717
/* Update the CFG for the instructions queued on edge E: pick an
   insertion point (destination head, source end, or a freshly split
   block) and emit the queued insns there.  */

void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = BB_END (bb);

      /* When partitioning hot/cold sections, an unconditional jump in
	 the cold partition that crosses to the other section needs a
	 REG_CROSSING_JUMP note.  */
      if (flag_reorder_blocks_and_partition
	  && targetm_common.have_named_sections
	  && e->src != ENTRY_BLOCK_PTR
	  && BB_PARTITION (e->src) == BB_COLD_PARTITION
	  && !(e->flags & EDGE_CROSSING)
	  && JUMP_P (after)
	  && !any_condjump_p (after)
	  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
	add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
1822
1823/* Update the CFG for all queued instructions. */
1824
1825void
4c9e08a4 1826commit_edge_insertions (void)
b36d64df 1827{
b36d64df 1828 basic_block bb;
1829
1830#ifdef ENABLE_CHECKING
1831 verify_flow_info ();
1832#endif
1833
4c26117a 1834 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
b36d64df 1835 {
cd665a06 1836 edge e;
1837 edge_iterator ei;
b36d64df 1838
cd665a06 1839 FOR_EACH_EDGE (e, ei, bb->succs)
1840 if (e->insns.r)
202bbc06 1841 commit_one_edge_insertion (e);
fb20d6fa 1842 }
b36d64df 1843}
1844\f
3072d30e 1845
5f5d4cd1 1846/* Print out RTL-specific basic block information (live information
5147ec07 1847 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1848 documented in dumpfile.h. */
b36d64df 1849
028f8cc7 1850static void
5147ec07 1851rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
b36d64df 1852{
1853 rtx insn;
1854 rtx last;
5f5d4cd1 1855 char *s_indent;
b36d64df 1856
f780cc25 1857 s_indent = (char *) alloca ((size_t) indent + 1);
4acc30e5 1858 memset (s_indent, ' ', (size_t) indent);
5f5d4cd1 1859 s_indent[indent] = '\0';
48e1416a 1860
5147ec07 1861 if (df && (flags & TDF_DETAILS))
3072d30e 1862 {
1863 df_dump_top (bb, outf);
1864 putc ('\n', outf);
1865 }
b36d64df 1866
4c2112a3 1867 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1868 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1869 insn = NEXT_INSN (insn))
5147ec07 1870 {
ea9538fb 1871 if (flags & TDF_DETAILS)
1872 df_dump_insn_top (insn, outf);
5147ec07 1873 if (! (flags & TDF_SLIM))
1874 print_rtl_single (outf, insn);
1875 else
1876 dump_insn_slim (outf, insn);
ea9538fb 1877 if (flags & TDF_DETAILS)
1878 df_dump_insn_bottom (insn, outf);
5147ec07 1879 }
1880
1881 if (df && (flags & TDF_DETAILS))
3072d30e 1882 {
1883 df_dump_bottom (bb, outf);
1884 putc ('\n', outf);
1885 }
1886
b36d64df 1887}
1888\f
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
{
  const_rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: the block starting at that insn, the block
	 ending at that insn, and the membership state of that insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      /* First pass: record, per insn UID, which block(s) it belongs to,
	 so the second pass can annotate strays and duplicates.  */
      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE (bb)
	    {
	      rtx x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: walk the whole insn chain in order, emitting block
	 banners and df info around the insns they delimit.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  /* Delay-slot insns queued for the epilogue are not in the main chain;
     dump them separately.  */
  if (crtl->epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1994\f
4a020a8c 1995/* Update the branch probability of BB if a REG_BR_PROB is present. */
1996
f884e43f 1997void
4c9e08a4 1998update_br_prob_note (basic_block bb)
f884e43f 1999{
2000 rtx note;
6d7dc5b9 2001 if (!JUMP_P (BB_END (bb)))
f884e43f 2002 return;
5496dbfc 2003 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
f884e43f 2004 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2005 return;
2006 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2007}
5d65a39c 2008
2009/* Get the last insn associated with block BB (that includes barriers and
2010 tablejumps after BB). */
2011rtx
2012get_last_bb_insn (basic_block bb)
2013{
2014 rtx tmp;
2015 rtx end = BB_END (bb);
2016
2017 /* Include any jump table following the basic block. */
2018 if (tablejump_p (end, NULL, &tmp))
2019 end = tmp;
2020
2021 /* Include any barriers that may follow the basic block. */
c4d13c5c 2022 tmp = next_nonnote_insn_bb (end);
5d65a39c 2023 while (tmp && BARRIER_P (tmp))
2024 {
2025 end = tmp;
c4d13c5c 2026 tmp = next_nonnote_insn_bb (end);
5d65a39c 2027 }
2028
2029 return end;
2030}
f884e43f 2031\f
028f8cc7 2032/* Verify the CFG and RTL consistency common for both underlying RTL and
2033 cfglayout RTL.
b36d64df 2034
2035 Currently it does following checks:
2036
b36d64df 2037 - overlapping of basic blocks
fcde5c9e 2038 - insns with wrong BLOCK_FOR_INSN pointers
b36d64df 2039 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
dd5b4b36 2040 - tails of basic blocks (ensure that boundary is necessary)
b36d64df 2041 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2042 and NOTE_INSN_BASIC_BLOCK
4f18499c 2043 - verify that no fall_thru edge crosses hot/cold partition boundaries
fcde5c9e 2044 - verify that there are no pending RTL branch predictions
b36d64df 2045
2046 In future it can be extended check a lot of other stuff as well
2047 (reachability of basic blocks, life information, etc. etc.). */
5f5d4cd1 2048
static int
rtl_verify_flow_info_1 (void)
{
  rtx x;
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      /* Every insn inside BB must point back at BB.  */
      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header/footer insns (cfglayout mode) live outside any block, so
	 their bb field must be NULL (barriers have no such field).  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      /* The REG_BR_PROB note on a conditional jump must match the
	 probability stored on the branch edge (unless no profile).  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
	      && profile_status != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}

      /* Classify and sanity-check each outgoing edge, counting the
	 kinds so the aggregate checks below can run.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* EDGE_CROSSING must be set exactly on edges between hot and
	     cold partitions, and such edges may not be fallthru or EH.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR
			 && e->dest != EXIT_BLOCK_PTR);
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary (bb %i)",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary (bb %i)",
			 e->src->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* An edge with none but the "decorative" flags counts as a
	     plain branch edge.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* Aggregate consistency checks between the edge counts and the
	 insn ending BB.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many eh edges %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_call && !CALL_P (BB_END (bb)))
	{
	  error ("call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
	/* We may have a barrier inside a basic block before dead code
	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body: no stray block notes, no control flow insn
	   anywhere but at BB_END.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
5cc577b6 2288
028f8cc7 2289/* Verify the CFG and RTL consistency common for both underlying RTL and
2290 cfglayout RTL.
5cc577b6 2291
028f8cc7 2292 Currently it does following checks:
2293 - all checks of rtl_verify_flow_info_1
fcde5c9e 2294 - test head/end pointers
028f8cc7 2295 - check that all insns are in the basic blocks
2296 (except the switch handling code, barriers and notes)
2297 - check that all returns are followed by barriers
2298 - check that all fallthru edge points to the adjacent blocks. */
fcde5c9e 2299
static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  /* Cursor that walks the insn chain backwards across all blocks; blocks
     are visited in reverse, so each iteration resumes where the previous
     block's head left off.  */
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
  const int max_uid = get_max_uid ();

  /* Map from insn UID to the (unique) block claiming that insn.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Resume the backward walk just before this block's head.  */
      last_head = PREV_INSN (x);

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  /* A fallthru edge must go to the next block in the chain, and
	     nothing executable may sit between the two blocks.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  /* Forward pass: check that basic block notes appear exactly once per
     block, in block order, and that nothing executable sits outside.  */
  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  return err;
}
2481\f
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip back over trailing debug insns and notes so the checks below
     look at the last "real" insn of the block.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      /* Keep only the edges the jump can still take: fallthru of a
	 condjump, the jump target, exit for a return, and EH edges of a
	 throwing insn.  Everything else is removed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
2700
5cc577b6 2701/* Search all basic blocks for potentially dead edges and purge them. Return
2702 true if some edge has been eliminated. */
b36d64df 2703
2704bool
94ee50e8 2705purge_all_dead_edges (void)
b36d64df 2706{
4c26117a 2707 int purged = false;
4c26117a 2708 basic_block bb;
0922c912 2709
4c26117a 2710 FOR_EACH_BB (bb)
0922c912 2711 {
4c26117a 2712 bool purged_here = purge_dead_edges (bb);
5cc577b6 2713
0922c912 2714 purged |= purged_here;
0922c912 2715 }
5cc577b6 2716
b36d64df 2717 return purged;
2718}
1026363d 2719
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/throwing insn, then move
		 everything that followed it onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
23a070f3 2808\f
2809/* Cut the insns from FIRST to LAST out of the insns stream. */
2810
2811rtx
2812unlink_insn_chain (rtx first, rtx last)
2813{
2814 rtx prevfirst = PREV_INSN (first);
2815 rtx nextlast = NEXT_INSN (last);
2816
2817 PREV_INSN (first) = NULL;
2818 NEXT_INSN (last) = NULL;
2819 if (prevfirst)
2820 NEXT_INSN (prevfirst) = nextlast;
2821 if (nextlast)
2822 PREV_INSN (nextlast) = prevfirst;
2823 else
2824 set_last_insn (prevfirst);
2825 if (!prevfirst)
2826 set_first_insn (nextlast);
2827 return first;
2828}
2829\f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* The head of the following block (if any) bounds the scan.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Other notes are skipped without extending last_insn.
		 (The break below is unreachable after continue.)  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by its jump table belongs with
	     the preceding block; include both and keep scanning.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Move the note past the insns we are keeping with BB.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
2913
2914/* Locate or create a label for a given basic block. */
2915
2916static rtx
2917label_for_bb (basic_block bb)
2918{
2919 rtx label = BB_HEAD (bb);
2920
2921 if (!LABEL_P (label))
2922 {
2923 if (dump_file)
2924 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2925
2926 label = block_label (bb);
2927 }
2928
2929 return label;
2930}
2931
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first basic-block note; everything before it is the
     function header kept aside while in cfglayout mode.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  /* For each block, detach the insns between the previous block's end
     and this block's head into BB_HEADER, and the trailing insns that
     belong with the block (see skip_insns_after_block) into BB_FOOTER.  */
  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block becomes the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
2975\f
/* Pass execute hook: switch the current function into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
2982
2983static unsigned int
2984outof_cfg_layout_mode (void)
2985{
2986 basic_block bb;
2987
2988 FOR_EACH_BB (bb)
2989 if (bb->next_bb != EXIT_BLOCK_PTR)
2990 bb->aux = bb->next_bb;
2991
2992 cfg_layout_finalize ();
2993
2994 return 0;
2995}
2996
/* Pass descriptor: enter cfglayout mode (see into_cfg_layout_mode);
   advertises PROP_cfglayout to the passes that run in between.  */

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
3016
/* Pass descriptor: leave cfglayout mode (see outof_cfg_layout_mode);
   destroys PROP_cfglayout.  */

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
3036\f
3037
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: walk the aux chain and rewrite the
     prev_bb/next_bb links to match it.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3110\f
3111
/* Given a reorder chain (linked through bb->aux), rearrange the insn
   stream to match, then insert the jumps and labels needed to make the
   new block order valid (inverting or adding jumps where fallthrough
   edges no longer reach the physically next block).  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice the block's header (if any), its insns, and its footer
	 (if any) onto the end of the growing chain.  */
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-thru across
	     section boundaries).  */
	  BB_COPY_PARTITION (src_bb, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm_common.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* Look backwards through E->src for an insn that already
		 carries the edge's locus.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Or an insn at the start of the destination.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and give the new block's final
		 insn (a nop if necessary) the edge's locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
3415\f
3416/* Perform sanity checks on the insn chain.
3417 1. Check that next/prev pointers are consistent in both the forward and
3418 reverse direction.
3419 2. Count insns in chain, going both directions, and check if equal.
3420 3. Check that get_last_insn () returns the actual end of chain. */
3421
3422DEBUG_FUNCTION void
3423verify_insn_chain (void)
3424{
3425 rtx x, prevx, nextx;
3426 int insn_cnt1, insn_cnt2;
3427
3428 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3429 x != 0;
3430 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3431 gcc_assert (PREV_INSN (x) == prevx);
3432
3433 gcc_assert (prevx == get_last_insn ());
3434
3435 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3436 x != 0;
3437 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3438 gcc_assert (NEXT_INSN (x) == nextx);
3439
3440 gcc_assert (insn_cnt1 == insn_cnt2);
3441}
3442\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met by moving that block (splitting it first
   if it is the entry block's successor) to the end of the aux chain.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  /* bb->aux != NULL means BB is not already last in the chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...and re-append it at the very end.  */
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
3487
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one, by redirecting all fallthru edges to a common
   forwarder block placed just before the exit block.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Detect whether a second fallthru predecessor exists.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
3537\f
3538/* Return true in case it is possible to duplicate the basic block BB. */
3539
3540static bool
3541cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3542{
3543 /* Do not attempt to duplicate tablejumps, as we need to unshare
3544 the dispatch table. This is difficult to do, as the instructions
3545 computing jump destination may be hoisted outside the basic block. */
3546 if (tablejump_p (BB_END (bb), NULL, NULL))
3547 return false;
3548
3549 /* Do not duplicate blocks containing insns that can't be copied. */
3550 if (targetm.cannot_copy_insn_p)
3551 {
3552 rtx insn = BB_HEAD (bb);
3553 while (1)
3554 {
3555 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3556 return false;
3557 if (insn == BB_END (bb))
3558 break;
3559 insn = NEXT_INSN (insn);
3560 }
3561 }
3562
3563 return true;
3564}
3565
/* Emit a copy of the insn chain FROM .. TO (inclusive) at the end of
   the current insn stream and return the first insn of the copy.
   Dispatch tables (and the barrier following them), label debug insns
   and code labels are not copied; most notes are dropped, see the
   NOTE switch below.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying following barrier as well if any
		 (and debug insns in between).  */
	      rtx next;

	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Preserve (simple) return jump labels, which are not real
	     labels and so are not fixed up by the copy.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Skip the placeholder note emitted above and return the copy.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
3656
/* Create a duplicate of the basic block BB, placed just before the
   exit block, including copies of its header and footer chains.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* duplicate_insn_chain emits the copy at the end of the insn
	 stream; unlink it back into the new block's header.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Likewise for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
3693
3694\f
3695/* Main entry point to this module - initialize the datastructures for
3696 CFG layout changes. It keeps LOOPS up-to-date if not null.
3697
3698 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3699
3700void
3701cfg_layout_initialize (unsigned int flags)
3702{
3703 rtx x;
3704 basic_block bb;
3705
3706 initialize_original_copy_tables ();
3707
3708 cfg_layout_rtl_register_cfg_hooks ();
3709
3710 record_effective_endpoints ();
3711
3712 /* Make sure that the targets of non local gotos are marked. */
3713 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3714 {
3715 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3716 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3717 }
3718
3719 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3720}
3721
3722/* Splits superblocks. */
3723void
3724break_superblocks (void)
3725{
3726 sbitmap superblocks;
3727 bool need = false;
3728 basic_block bb;
3729
3730 superblocks = sbitmap_alloc (last_basic_block);
53c5d9d4 3731 bitmap_clear (superblocks);
23a070f3 3732
3733 FOR_EACH_BB (bb)
3734 if (bb->flags & BB_SUPERBLOCK)
3735 {
3736 bb->flags &= ~BB_SUPERBLOCK;
3737 SET_BIT (superblocks, bb->index);
3738 need = true;
3739 }
3740
3741 if (need)
3742 {
3743 rebuild_jump_labels (get_insns ());
3744 find_many_sub_basic_blocks (superblocks);
3745 }
3746
3747 free (superblocks);
3748}
3749
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  /* After reload, the block that falls through to exit must come last,
     unless the target emits an epilogue pattern that ends the function.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
3777
54f21e20 3778
1026363d 3779/* Same as split_block but update cfg_layout structures. */
5f5d4cd1 3780
3781static basic_block
4c9e08a4 3782cfg_layout_split_block (basic_block bb, void *insnp)
1026363d 3783{
f780cc25 3784 rtx insn = (rtx) insnp;
5f5d4cd1 3785 basic_block new_bb = rtl_split_block (bb, insn);
1026363d 3786
43e94e51 3787 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3788 BB_FOOTER (bb) = NULL;
1026363d 3789
5f5d4cd1 3790 return new_bb;
1026363d 3791}
3792
1026363d 3793/* Redirect Edge to DEST. */
4ee9c684 3794static edge
4c9e08a4 3795cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
1026363d 3796{
3797 basic_block src = e->src;
4ee9c684 3798 edge ret;
1026363d 3799
c60fa3a7 3800 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4ee9c684 3801 return NULL;
c60fa3a7 3802
e5562ab8 3803 if (e->dest == dest)
4ee9c684 3804 return e;
c60fa3a7 3805
e5562ab8 3806 if (e->src != ENTRY_BLOCK_PTR
4ee9c684 3807 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
adee86a0 3808 {
3072d30e 3809 df_set_bb_dirty (src);
4ee9c684 3810 return ret;
adee86a0 3811 }
c60fa3a7 3812
3813 if (e->src == ENTRY_BLOCK_PTR
3814 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
3815 {
450d042a 3816 if (dump_file)
3817 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
c60fa3a7 3818 e->src->index, dest->index);
3819
3072d30e 3820 df_set_bb_dirty (e->src);
c60fa3a7 3821 redirect_edge_succ (e, dest);
4ee9c684 3822 return e;
c60fa3a7 3823 }
3824
1026363d 3825 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
3826 in the case the basic block appears to be in sequence. Avoid this
3827 transformation. */
3828
1026363d 3829 if (e->flags & EDGE_FALLTHRU)
3830 {
3831 /* Redirect any branch edges unified with the fallthru one. */
6d7dc5b9 3832 if (JUMP_P (BB_END (src))
b9de5542 3833 && label_is_jump_target_p (BB_HEAD (e->dest),
3834 BB_END (src)))
1026363d 3835 {
cc636d56 3836 edge redirected;
a0c938f0 3837
450d042a 3838 if (dump_file)
3839 fprintf (dump_file, "Fallthru edge unified with branch "
b9de5542 3840 "%i->%i redirected to %i\n",
3841 e->src->index, e->dest->index, dest->index);
3842 e->flags &= ~EDGE_FALLTHRU;
cc636d56 3843 redirected = redirect_branch_edge (e, dest);
3844 gcc_assert (redirected);
d5321455 3845 redirected->flags |= EDGE_FALLTHRU;
3846 df_set_bb_dirty (redirected->src);
3847 return redirected;
1026363d 3848 }
3849 /* In case we are redirecting fallthru edge to the branch edge
a0c938f0 3850 of conditional jump, remove it. */
cd665a06 3851 if (EDGE_COUNT (src->succs) == 2)
1026363d 3852 {
9ea57afb 3853 /* Find the edge that is different from E. */
3854 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
cd665a06 3855
1026363d 3856 if (s->dest == dest
5496dbfc 3857 && any_condjump_p (BB_END (src))
3858 && onlyjump_p (BB_END (src)))
3859 delete_insn (BB_END (src));
1026363d 3860 }
450d042a 3861 if (dump_file)
6d31b223 3862 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
c60fa3a7 3863 e->src->index, e->dest->index, dest->index);
d5321455 3864 ret = redirect_edge_succ_nodup (e, dest);
1026363d 3865 }
3866 else
c60fa3a7 3867 ret = redirect_branch_edge (e, dest);
1026363d 3868
3869 /* We don't want simplejumps in the insn stream during cfglayout. */
cc636d56 3870 gcc_assert (!simplejump_p (BB_END (src)));
1026363d 3871
3072d30e 3872 df_set_bb_dirty (src);
1026363d 3873 return ret;
3874}
3875
3876/* Simple wrapper as we always can redirect fallthru edges. */
3877static basic_block
4c9e08a4 3878cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
1026363d 3879{
cc636d56 3880 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
3881
3882 gcc_assert (redirected);
1026363d 3883 return NULL;
3884}
3885
5f5d4cd1 3886/* Same as delete_basic_block but update cfg_layout structures. */
3887
1026363d 3888static void
4c9e08a4 3889cfg_layout_delete_block (basic_block bb)
1026363d 3890{
5496dbfc 3891 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
1026363d 3892
43e94e51 3893 if (BB_HEADER (bb))
1026363d 3894 {
5496dbfc 3895 next = BB_HEAD (bb);
1026363d 3896 if (prev)
43e94e51 3897 NEXT_INSN (prev) = BB_HEADER (bb);
1026363d 3898 else
43e94e51 3899 set_first_insn (BB_HEADER (bb));
3900 PREV_INSN (BB_HEADER (bb)) = prev;
3901 insn = BB_HEADER (bb);
1026363d 3902 while (NEXT_INSN (insn))
3903 insn = NEXT_INSN (insn);
3904 NEXT_INSN (insn) = next;
3905 PREV_INSN (next) = insn;
3906 }
5496dbfc 3907 next = NEXT_INSN (BB_END (bb));
43e94e51 3908 if (BB_FOOTER (bb))
1026363d 3909 {
43e94e51 3910 insn = BB_FOOTER (bb);
c60fa3a7 3911 while (insn)
3912 {
6d7dc5b9 3913 if (BARRIER_P (insn))
c60fa3a7 3914 {
3915 if (PREV_INSN (insn))
3916 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3917 else
43e94e51 3918 BB_FOOTER (bb) = NEXT_INSN (insn);
c60fa3a7 3919 if (NEXT_INSN (insn))
3920 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3921 }
6d7dc5b9 3922 if (LABEL_P (insn))
c60fa3a7 3923 break;
3924 insn = NEXT_INSN (insn);
3925 }
43e94e51 3926 if (BB_FOOTER (bb))
c60fa3a7 3927 {
5496dbfc 3928 insn = BB_END (bb);
43e94e51 3929 NEXT_INSN (insn) = BB_FOOTER (bb);
3930 PREV_INSN (BB_FOOTER (bb)) = insn;
c60fa3a7 3931 while (NEXT_INSN (insn))
3932 insn = NEXT_INSN (insn);
3933 NEXT_INSN (insn) = next;
3934 if (next)
3935 PREV_INSN (next) = insn;
3936 else
3937 set_last_insn (insn);
3938 }
1026363d 3939 }
3940 if (bb->next_bb != EXIT_BLOCK_PTR)
43e94e51 3941 to = &BB_HEADER (bb->next_bb);
1026363d 3942 else
3943 to = &cfg_layout_function_footer;
7a22afab 3944
1026363d 3945 rtl_delete_block (bb);
3946
3947 if (prev)
3948 prev = NEXT_INSN (prev);
4c9e08a4 3949 else
1026363d 3950 prev = get_insns ();
3951 if (next)
3952 next = PREV_INSN (next);
4c9e08a4 3953 else
1026363d 3954 next = get_last_insn ();
3955
3956 if (next && NEXT_INSN (next) != prev)
3957 {
3958 remaints = unlink_insn_chain (prev, next);
3959 insn = remaints;
3960 while (NEXT_INSN (insn))
3961 insn = NEXT_INSN (insn);
3962 NEXT_INSN (insn) = *to;
3963 if (*to)
3964 PREV_INSN (*to) = insn;
3965 *to = remaints;
3966 }
3967}
3968
139c3f48 3969/* Return true when blocks A and B can be safely merged. */
47aaf6e6 3970
c60fa3a7 3971static bool
47aaf6e6 3972cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
c60fa3a7 3973{
4f18499c 3974 /* If we are partitioning hot/cold basic blocks, we don't want to
3975 mess up unconditional or indirect jumps that cross between hot
7562ed74 3976 and cold sections.
3977
1118aef7 3978 Basic block partitioning may result in some jumps that appear to
a0c938f0 3979 be optimizable (or blocks that appear to be mergeable), but which really
3980 must be left untouched (they are required to make it safely across
3981 partition boundaries). See the comments at the top of
1118aef7 3982 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
3983
1897b881 3984 if (BB_PARTITION (a) != BB_PARTITION (b))
7562ed74 3985 return false;
4f18499c 3986
79f958cb 3987 /* Protect the loop latches. */
3988 if (current_loops && b->loop_father->latch == b)
3989 return false;
3990
d945554e 3991 /* If we would end up moving B's instructions, make sure it doesn't fall
3992 through into the exit block, since we cannot recover from a fallthrough
3993 edge into the exit block occurring in the middle of a function. */
3994 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
3995 {
3996 edge e = find_fallthru_edge (b->succs);
3997 if (e && e->dest == EXIT_BLOCK_PTR)
3998 return false;
3999 }
4000
c60fa3a7 4001 /* There must be exactly one edge in between the blocks. */
ea091dfd 4002 return (single_succ_p (a)
4003 && single_succ (a) == b
4004 && single_pred_p (b) == 1
cd665a06 4005 && a != b
c60fa3a7 4006 /* Must be simple edge. */
ea091dfd 4007 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
c60fa3a7 4008 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4aaec180 4009 /* If the jump insn has side effects, we can't kill the edge.
4010 When not optimizing, try_redirect_by_replacing_jump will
4011 not allow us to redirect an edge by replacing a table jump. */
6d7dc5b9 4012 && (!JUMP_P (BB_END (a))
4aaec180 4013 || ((!optimize || reload_completed)
5496dbfc 4014 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
c60fa3a7 4015}
4016
a53ff4c1 4017/* Merge block A and B. The blocks must be mergeable. */
4018
c60fa3a7 4019static void
4020cfg_layout_merge_blocks (basic_block a, basic_block b)
4021{
18b762f0 4022 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
d4afc00c 4023 rtx insn;
18b762f0 4024
1b4345f7 4025 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
c60fa3a7 4026
3072d30e 4027 if (dump_file)
18b762f0 4028 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4029 a->index);
3072d30e 4030
c60fa3a7 4031 /* If there was a CODE_LABEL beginning B, delete it. */
6d7dc5b9 4032 if (LABEL_P (BB_HEAD (b)))
7b07eae7 4033 {
7b07eae7 4034 delete_insn (BB_HEAD (b));
4035 }
c60fa3a7 4036
4037 /* We should have fallthru edge in a, or we can do dummy redirection to get
4038 it cleaned up. */
6d7dc5b9 4039 if (JUMP_P (BB_END (a)))
cd665a06 4040 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
cc636d56 4041 gcc_assert (!JUMP_P (BB_END (a)));
c60fa3a7 4042
8ddad41d 4043 /* When not optimizing CFG and the edge is the only place in RTL which holds
9c388755 4044 some unique locus, emit a nop with that locus in between. */
8ddad41d 4045 if (!optimize)
4046 emit_nop_for_unique_locus_between (a, b);
9c388755 4047
c60fa3a7 4048 /* Possible line number notes should appear in between. */
43e94e51 4049 if (BB_HEADER (b))
c60fa3a7 4050 {
5496dbfc 4051 rtx first = BB_END (a), last;
c60fa3a7 4052
43e94e51 4053 last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
5a23e907 4054 /* The above might add a BARRIER as BB_END, but as barriers
4055 aren't valid parts of a bb, remove_insn doesn't update
4056 BB_END if it is a barrier. So adjust BB_END here. */
4057 while (BB_END (a) != first && BARRIER_P (BB_END (a)))
4058 BB_END (a) = PREV_INSN (BB_END (a));
39257eb5 4059 delete_insn_chain (NEXT_INSN (first), last, false);
43e94e51 4060 BB_HEADER (b) = NULL;
c60fa3a7 4061 }
4062
4063 /* In the case basic blocks are not adjacent, move them around. */
5496dbfc 4064 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
c60fa3a7 4065 {
d4afc00c 4066 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
f41d4683 4067
d4afc00c 4068 emit_insn_after_noloc (insn, BB_END (a), a);
c60fa3a7 4069 }
4070 /* Otherwise just re-associate the instructions. */
4071 else
4072 {
5496dbfc 4073 insn = BB_HEAD (b);
5496dbfc 4074 BB_END (a) = BB_END (b);
c60fa3a7 4075 }
4076
d4afc00c 4077 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4078 We need to explicitly call. */
4079 update_bb_for_insn_chain (insn, BB_END (b), a);
4080
4081 /* Skip possible DELETED_LABEL insn. */
4082 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4083 insn = NEXT_INSN (insn);
4084 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4085 BB_HEAD (b) = NULL;
4086 delete_insn (insn);
4087
3072d30e 4088 df_bb_delete (b->index);
4089
c60fa3a7 4090 /* Possible tablejumps and barriers should appear after the block. */
43e94e51 4091 if (BB_FOOTER (b))
c60fa3a7 4092 {
43e94e51 4093 if (!BB_FOOTER (a))
4094 BB_FOOTER (a) = BB_FOOTER (b);
c60fa3a7 4095 else
4096 {
43e94e51 4097 rtx last = BB_FOOTER (a);
c60fa3a7 4098
4099 while (NEXT_INSN (last))
4100 last = NEXT_INSN (last);
43e94e51 4101 NEXT_INSN (last) = BB_FOOTER (b);
4102 PREV_INSN (BB_FOOTER (b)) = last;
c60fa3a7 4103 }
43e94e51 4104 BB_FOOTER (b) = NULL;
c60fa3a7 4105 }
4106
18b762f0 4107 /* If B was a forwarder block, propagate the locus on the edge. */
8e7408e3 4108 if (forwarder_p
4109 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) != UNKNOWN_LOCATION)
18b762f0 4110 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4111
450d042a 4112 if (dump_file)
18b762f0 4113 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
c60fa3a7 4114}
4115
4116/* Split edge E. */
5f5d4cd1 4117
c60fa3a7 4118static basic_block
4119cfg_layout_split_edge (edge e)
4120{
c60fa3a7 4121 basic_block new_bb =
4122 create_basic_block (e->src != ENTRY_BLOCK_PTR
5496dbfc 4123 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
c60fa3a7 4124 NULL_RTX, e->src);
4125
e1b2b77c 4126 if (e->dest == EXIT_BLOCK_PTR)
4127 BB_COPY_PARTITION (new_bb, e->src);
4128 else
4129 BB_COPY_PARTITION (new_bb, e->dest);
3bdc4312 4130 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
c60fa3a7 4131 redirect_edge_and_branch_force (e, new_bb);
4132
4133 return new_bb;
4134}
4135
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   No work is required at the RTL level; this hook exists so that other IL
   implementations of the CFG hooks can perform their own fixups here.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4142
9631926a 4143/* Return true if BB contains only labels or non-executable
4144 instructions. */
4145
4146static bool
4147rtl_block_empty_p (basic_block bb)
4148{
4149 rtx insn;
4150
4151 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4152 return true;
4153
4154 FOR_BB_INSNS (bb, insn)
4155 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4156 return false;
4157
4158 return true;
4159}
4160
4161/* Split a basic block if it ends with a conditional branch and if
4162 the other part of the block is not empty. */
4163
4164static basic_block
4165rtl_split_block_before_cond_jump (basic_block bb)
4166{
4167 rtx insn;
4168 rtx split_point = NULL;
4169 rtx last = NULL;
4170 bool found_code = false;
4171
4172 FOR_BB_INSNS (bb, insn)
4173 {
4174 if (any_condjump_p (insn))
4175 split_point = last;
4176 else if (NONDEBUG_INSN_P (insn))
4177 found_code = true;
4178 last = insn;
4179 }
4180
4181 /* Did not find everything. */
4182 if (found_code && split_point)
4183 return split_block (bb, split_point)->dest;
4184 else
4185 return NULL;
4186}
4187
4ee9c684 4188/* Return 1 if BB ends with a call, possibly followed by some
4189 instructions that must stay with the call, 0 otherwise. */
4190
4191static bool
47aaf6e6 4192rtl_block_ends_with_call_p (basic_block bb)
4ee9c684 4193{
4194 rtx insn = BB_END (bb);
4195
6d7dc5b9 4196 while (!CALL_P (insn)
4ee9c684 4197 && insn != BB_HEAD (bb)
f69dfa18 4198 && (keep_with_call_p (insn)
9845d120 4199 || NOTE_P (insn)
4200 || DEBUG_INSN_P (insn)))
4ee9c684 4201 insn = PREV_INSN (insn);
6d7dc5b9 4202 return (CALL_P (insn));
4ee9c684 4203}
4204
4205/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4206
4207static bool
5493cb9a 4208rtl_block_ends_with_condjump_p (const_basic_block bb)
4ee9c684 4209{
4210 return any_condjump_p (BB_END (bb));
4211}
4212
4213/* Return true if we need to add fake edge to exit.
4214 Helper function for rtl_flow_call_edges_add. */
4215
4216static bool
5493cb9a 4217need_fake_edge_p (const_rtx insn)
4ee9c684 4218{
4219 if (!INSN_P (insn))
4220 return false;
4221
6d7dc5b9 4222 if ((CALL_P (insn)
4ee9c684 4223 && !SIBLING_CALL_P (insn)
4224 && !find_reg_note (insn, REG_NORETURN, NULL)
9c2a0c05 4225 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4ee9c684 4226 return true;
4227
4228 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4229 && MEM_VOLATILE_P (PATTERN (insn)))
4230 || (GET_CODE (PATTERN (insn)) == PARALLEL
4231 && asm_noperands (insn) != -1
4232 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4233 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4234}
4235
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count: splitting below may append new blocks,
     and those must not be rescanned by the main loop.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Commit a dummy USE on the fallthru edge; this is the
		 "dummy instruction in a new last basic block" described
		 above, so the call no longer ends the last block.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      /* Deleted blocks leave a NULL slot behind.  */
      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Scan the insns of the block backwards.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  /* Remember the previous insn now, before any splitting below
	     rearranges the chain around INSN.  */
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4361
/* Add COMP_RTX as a condition at the end of COND_BB, branching to
   FIRST_HEAD when the condition holds.  SECOND_HEAD would be the
   fall-through block, but it needs no handling here — the loop
   versioning code takes care of the fall-through.  SECOND_HEAD is kept
   as a parameter only so this hook has the same signature as the
   tree-level implementation.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  /* COMP_RTX is a comparison rtx; pull out its code and operands.  */
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;


  label = block_label (first_head);
  /* OP0 may have VOIDmode (e.g. a constant); fall back to OP1's mode.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  /* Emit the compare-and-branch to LABEL into the sequence and wire up
     the jump's label bookkeeping.  */
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Append the whole conditional sequence at the end of COND_BB.  */
  emit_insn_after(seq, BB_END(cond_bb));
}
4398
4399
4400/* Given a block B with unconditional branch at its end, get the
4401 store the return the branch edge and the fall-thru edge in
4402 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4403static void
4404rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4405 edge *fallthru_edge)
4406{
4407 edge e = EDGE_SUCC (b, 0);
4408
4409 if (e->flags & EDGE_FALLTHRU)
4410 {
4411 *fallthru_edge = e;
4412 *branch_edge = EDGE_SUCC (b, 1);
4413 }
4414 else
4415 {
4416 *branch_edge = e;
4417 *fallthru_edge = EDGE_SUCC (b, 1);
4418 }
4419}
4420
e0dde8f8 4421void
4422init_rtl_bb_info (basic_block bb)
4423{
43e94e51 4424 gcc_assert (!bb->il.x.rtl);
4425 bb->il.x.head_ = NULL;
4426 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
e0dde8f8 4427}
4428
611d2ac1 4429/* Returns true if it is possible to remove edge E by redirecting
4430 it to the destination of the other edge from E->src. */
4431
4432static bool
5493cb9a 4433rtl_can_remove_branch_p (const_edge e)
611d2ac1 4434{
5493cb9a 4435 const_basic_block src = e->src;
4436 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4437 const_rtx insn = BB_END (src), set;
611d2ac1 4438
4439 /* The conditions are taken from try_redirect_by_replacing_jump. */
4440 if (target == EXIT_BLOCK_PTR)
4441 return false;
4442
4443 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4444 return false;
4445
4446 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
4447 || BB_PARTITION (src) != BB_PARTITION (target))
4448 return false;
4449
4450 if (!onlyjump_p (insn)
4451 || tablejump_p (insn, NULL, NULL))
4452 return false;
4453
4454 set = single_set (insn);
4455 if (!set || side_effects_p (set))
4456 return false;
4457
4458 return true;
4459}
4460
0a55d497 4461static basic_block
4462rtl_duplicate_bb (basic_block bb)
4463{
4464 bb = cfg_layout_duplicate_bb (bb);
4465 bb->aux = NULL;
4466 return bb;
4467}
4468
98193482 4469/* Do book-keeping of basic block BB for the profile consistency checker.
4470 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4471 then do post-pass accounting. Store the counting in RECORD. */
4472static void
4473rtl_account_profile_record (basic_block bb, int after_pass,
4474 struct profile_record *record)
4475{
4476 rtx insn;
4477 FOR_BB_INSNS (bb, insn)
4478 if (INSN_P (insn))
4479 {
4480 record->size[after_pass]
4481 += insn_rtx_cost (PATTERN (insn), false);
4482 if (profile_status == PROFILE_READ)
4483 record->time[after_pass]
4484 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4485 else if (profile_status == PROFILE_GUESSED)
4486 record->time[after_pass]
4487 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
4488 }
4489}
4490
/* Implementation of CFG manipulation for linearized RTL.
   This is a positional initializer: entries must be kept in the same
   order as the fields of struct cfg_hooks in cfghooks.h.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,	/* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
4527
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   This is a positional initializer: entries must be kept in the same
   order as the fields of struct cfg_hooks in cfghooks.h.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge: not needed, since in cfglayout mode
	   fallthru blocks need not be adjacent.  */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
23a070f3 4568
4569#include "gt-cfgrtl.h"