]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
* rtl.h (print_insn_with_notes): Remove prototype.
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
b36d64df 1/* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
d7ff3ab6 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
b36d64df 5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
b36d64df 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
b36d64df 21
5cc577b6 22/* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
b36d64df 24
25 Available functionality:
c60fa3a7 26 - Basic CFG/RTL manipulation API documented in cfghooks.h
5cc577b6 27 - CFG-aware instruction chain manipulation
b36d64df 28 delete_insn, delete_insn_chain
c60fa3a7 29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
54f21e20 33 - CFG fixing after coarse manipulation
34 fixup_abnormal_edges
c60fa3a7 35
36 Functions not supposed for generic use:
5cc577b6 37 - Infrastructure to determine quickly basic block for insn
b36d64df 38 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5cc577b6 39 - Edge redirection with updating and optimizing of insn chain
c60fa3a7 40 block_label, tidy_fallthru_edge, force_nonfallthru */
b36d64df 41\f
42#include "config.h"
43#include "system.h"
805e22b2 44#include "coretypes.h"
45#include "tm.h"
b36d64df 46#include "tree.h"
b36d64df 47#include "hard-reg-set.h"
48#include "basic-block.h"
49#include "regs.h"
50#include "flags.h"
b36d64df 51#include "function.h"
52#include "except.h"
d7091a76 53#include "rtl-error.h"
b36d64df 54#include "tm_p.h"
55#include "obstack.h"
225e5f6f 56#include "insn-attr.h"
26fb1781 57#include "insn-config.h"
804e497b 58#include "expr.h"
065efcb1 59#include "target.h"
218e3e4e 60#include "common/common-target.h"
c50ae675 61#include "cfgloop.h"
e0dde8f8 62#include "ggc.h"
77fce4cd 63#include "tree-pass.h"
3072d30e 64#include "df.h"
b36d64df 65
23a070f3 66/* Holds the interesting leading and trailing notes for the function.
67 Only applicable if the CFG is in cfglayout mode. */
68static GTY(()) rtx cfg_layout_function_footer;
69static GTY(()) rtx cfg_layout_function_header;
70
71static rtx skip_insns_after_block (basic_block);
72static void record_effective_endpoints (void);
73static rtx label_for_bb (basic_block);
74static void fixup_reorder_chain (void);
75
76void verify_insn_chain (void);
77static void fixup_fallthru_exit_predecessor (void);
5493cb9a 78static int can_delete_note_p (const_rtx);
79static int can_delete_label_p (const_rtx);
4c9e08a4 80static basic_block rtl_split_edge (edge);
5f5d4cd1 81static bool rtl_move_block_after (basic_block, basic_block);
4c9e08a4 82static int rtl_verify_flow_info (void);
5f5d4cd1 83static basic_block cfg_layout_split_block (basic_block, void *);
4ee9c684 84static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
4c9e08a4 85static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
86static void cfg_layout_delete_block (basic_block);
87static void rtl_delete_block (basic_block);
88static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
4ee9c684 89static edge rtl_redirect_edge_and_branch (edge, basic_block);
5f5d4cd1 90static basic_block rtl_split_block (basic_block, void *);
5147ec07 91static void rtl_dump_bb (FILE *, basic_block, int, int);
4c9e08a4 92static int rtl_verify_flow_info_1 (void);
5f5d4cd1 93static void rtl_make_forwarder_block (edge);
b36d64df 94\f
95/* Return true if NOTE is not one of the ones that must be kept paired,
5cc577b6 96 so that we may simply delete it. */
b36d64df 97
98static int
5493cb9a 99can_delete_note_p (const_rtx note)
b36d64df 100{
25e880b1 101 switch (NOTE_KIND (note))
102 {
103 case NOTE_INSN_DELETED:
104 case NOTE_INSN_BASIC_BLOCK:
105 case NOTE_INSN_EPILOGUE_BEG:
106 return true;
107
108 default:
109 return false;
110 }
b36d64df 111}
112
113/* True if a given label can be deleted. */
114
115static int
5493cb9a 116can_delete_label_p (const_rtx label)
b36d64df 117{
5cc577b6 118 return (!LABEL_PRESERVE_P (label)
119 /* User declared labels must be preserved. */
120 && LABEL_NAME (label) == 0
2f138c1c 121 && !in_expr_list_p (forced_labels, label));
b36d64df 122}
123
/* Delete INSN by patching it out.

   Labels that cannot be deleted outright (they may be referenced from
   the constant pool, variables, etc.) are converted in place to
   NOTE_INSN_DELETED_LABEL notes instead of being unlinked.  Label use
   counts referenced by INSN (jump targets, REG_LABEL_* notes, jump
   table entries) are decremented.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be references via variables, constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  /* Rewrite the label in place into a deleted-label note,
	     preserving its name for debug output.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the label headed its block, move it after the block's
	     NOTE_INSN_BASIC_BLOCK so the note becomes the new head.  */
	  if (bb_note != NULL_RTX && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && BLOCK_FOR_INSN (bb_note) == bb)
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      /* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in 0.  */
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
214
fb20d6fa 215/* Like delete_insn but also purge dead edges from BB. */
ebc94641 216
a2b85e40 217void
4c9e08a4 218delete_insn_and_edges (rtx insn)
fb20d6fa 219{
fb20d6fa 220 bool purge = false;
221
ab87d1bc 222 if (INSN_P (insn)
fb20d6fa 223 && BLOCK_FOR_INSN (insn)
5496dbfc 224 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
fb20d6fa 225 purge = true;
a2b85e40 226 delete_insn (insn);
fb20d6fa 227 if (purge)
228 purge_dead_edges (BLOCK_FOR_INSN (insn));
fb20d6fa 229}
230
b36d64df 231/* Unlink a chain of insns between START and FINISH, leaving notes
39257eb5 232 that must be paired. If CLEAR_BB is true, we set bb field for
233 insns that cannot be removed to NULL. */
b36d64df 234
235void
39257eb5 236delete_insn_chain (rtx start, rtx finish, bool clear_bb)
b36d64df 237{
a2b85e40 238 rtx prev, current;
b36d64df 239
5cc577b6 240 /* Unchain the insns one by one. It would be quicker to delete all of these
241 with a single unchaining, rather than one at a time, but we need to keep
242 the NOTE's. */
a2b85e40 243 current = finish;
b36d64df 244 while (1)
245 {
a2b85e40 246 prev = PREV_INSN (current);
247 if (NOTE_P (current) && !can_delete_note_p (current))
b36d64df 248 ;
249 else
a2b85e40 250 delete_insn (current);
b36d64df 251
a2b85e40 252 if (clear_bb && !INSN_DELETED_P (current))
253 set_block_for_insn (current, NULL);
39257eb5 254
a2b85e40 255 if (current == start)
b36d64df 256 break;
a2b85e40 257 current = prev;
b36d64df 258 }
259}
260\f
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;  /* NOTE: intentionally shadows the AFTER parameter
		     within this arm; here it is an insn, not a block.  */

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      /* Only move the note if it is not already in position.  */
      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The block note goes right after the label.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the block note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
340
5cc577b6 341/* Create new basic block consisting of instructions in between HEAD and END
0a55d497 342 and place it to the BB chain after block AFTER. END can be NULL to
343 create a new empty basic block before HEAD. Both END and HEAD can be
344 NULL to create basic block at the end of INSN chain. */
b36d64df 345
c60fa3a7 346static basic_block
347rtl_create_basic_block (void *headp, void *endp, basic_block after)
b36d64df 348{
f780cc25 349 rtx head = (rtx) headp, end = (rtx) endp;
b36d64df 350 basic_block bb;
b3d6de89 351
743ff5bb 352 /* Grow the basic block array if needed. */
f1f41a6c 353 if ((size_t) last_basic_block >= basic_block_info->length ())
743ff5bb 354 {
355 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
f1f41a6c 356 vec_safe_grow_cleared (basic_block_info, new_size);
743ff5bb 357 }
b3d6de89 358
3c0a32c9 359 n_basic_blocks++;
b36d64df 360
f23d9a22 361 bb = create_basic_block_structure (head, end, NULL, after);
b36d64df 362 bb->aux = NULL;
363 return bb;
364}
c60fa3a7 365
366static basic_block
367cfg_layout_create_basic_block (void *head, void *end, basic_block after)
368{
369 basic_block newbb = rtl_create_basic_block (head, end, after);
370
c60fa3a7 371 return newbb;
372}
b36d64df 373\f
374/* Delete the insns in a (non-live) block. We physically delete every
375 non-deleted-note insn, and update the flow graph appropriately.
376
377 Return nonzero if we deleted an exception handler. */
378
379/* ??? Preserving all such notes strikes me as wrong. It would be nice
380 to post-process the stream to remove empty blocks, loops, ranges, etc. */
381
5f704f9f 382static void
4c9e08a4 383rtl_delete_block (basic_block b)
b36d64df 384{
5d65a39c 385 rtx insn, end;
b36d64df 386
387 /* If the head of this block is a CODE_LABEL, then it might be the
57548910 388 label for an exception handler which can't be reached. We need
389 to remove the label from the exception_handler_label list. */
5496dbfc 390 insn = BB_HEAD (b);
b36d64df 391
5d65a39c 392 end = get_last_bb_insn (b);
b36d64df 393
394 /* Selectively delete the entire chain. */
5496dbfc 395 BB_HEAD (b) = NULL;
39257eb5 396 delete_insn_chain (insn, end, true);
397
3072d30e 398
399 if (dump_file)
400 fprintf (dump_file, "deleting block %d\n", b->index);
401 df_bb_delete (b->index);
b36d64df 402}
403\f
f23d9a22 404/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
b36d64df 405
406void
4c9e08a4 407compute_bb_for_insn (void)
b36d64df 408{
4c26117a 409 basic_block bb;
b36d64df 410
4c26117a 411 FOR_EACH_BB (bb)
b36d64df 412 {
5496dbfc 413 rtx end = BB_END (bb);
5cc577b6 414 rtx insn;
b36d64df 415
5496dbfc 416 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
b36d64df 417 {
ab87d1bc 418 BLOCK_FOR_INSN (insn) = bb;
b36d64df 419 if (insn == end)
420 break;
b36d64df 421 }
422 }
423}
424
425/* Release the basic_block_for_insn array. */
426
2a1990e9 427unsigned int
4c9e08a4 428free_bb_for_insn (void)
b36d64df 429{
ab87d1bc 430 rtx insn;
431 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6d7dc5b9 432 if (!BARRIER_P (insn))
ab87d1bc 433 BLOCK_FOR_INSN (insn) = NULL;
2a1990e9 434 return 0;
b36d64df 435}
436
/* Pass body: tear down the per-insn CFG data.  With delay slots,
   compute DF note information first, while the CFG is still valid.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
453
/* Pass descriptor for freeing the CFG; destroys the PROP_cfg
   property so later passes know the CFG is gone.  */

struct rtl_opt_pass pass_free_cfg =
{
 {
  RTL_PASS,
  "*free_cfg",                          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_pass_free_cfg,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};
473
a0fee14a 474/* Return RTX to emit after when we want to emit code on the entry of function. */
475rtx
476entry_of_function (void)
477{
a0c938f0 478 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
4d2e5d52 479 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
a0fee14a 480}
481
aa02fa19 482/* Emit INSN at the entry point of the function, ensuring that it is only
483 executed once per function. */
484void
485emit_insn_at_entry (rtx insn)
486{
487 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
488 edge e = ei_safe_edge (ei);
1db61145 489 gcc_assert (e->flags & EDGE_FALLTHRU);
aa02fa19 490
491 insert_insn_on_edge (insn, e);
492 commit_edge_insertions ();
493}
494
f41d4683 495/* Update BLOCK_FOR_INSN of insns between BEGIN and END
48e1416a 496 (or BARRIER if found) and notify df of the bb change.
f41d4683 497 The insn chain range is inclusive
498 (i.e. both BEGIN and END will be updated. */
b36d64df 499
f41d4683 500static void
501update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
b36d64df 502{
503 rtx insn;
504
f41d4683 505 end = NEXT_INSN (end);
506 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
a2bdd643 507 if (!BARRIER_P (insn))
508 df_insn_change_bb (insn, bb);
b36d64df 509}
f41d4683 510
511/* Update BLOCK_FOR_INSN of insns in BB to BB,
512 and notify df of the change. */
513
514void
515update_bb_for_insn (basic_block bb)
516{
517 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
518}
519
4a020a8c 520\f
521/* Like active_insn_p, except keep the return value clobber around
522 even after reload. */
523
524static bool
525flow_active_insn_p (const_rtx insn)
526{
527 if (active_insn_p (insn))
528 return true;
529
530 /* A clobber of the function return value exists for buggy
531 programs that fail to return a value. Its effect is to
532 keep the return value from being live across the entire
533 function. If we allow it to be skipped, we introduce the
534 possibility for register lifetime confusion. */
535 if (GET_CODE (PATTERN (insn)) == CLOBBER
536 && REG_P (XEXP (PATTERN (insn), 0))
537 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
538 return true;
539
540 return false;
541}
542
543/* Return true if the block has no effect and only forwards control flow to
544 its single destination. */
4a020a8c 545
546bool
bbdfcf34 547contains_no_active_insn_p (const_basic_block bb)
4a020a8c 548{
549 rtx insn;
550
551 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
552 || !single_succ_p (bb))
553 return false;
554
bbdfcf34 555 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
556 if (INSN_P (insn) && flow_active_insn_p (insn))
557 return false;
558
559 return (!INSN_P (insn)
560 || (JUMP_P (insn) && simplejump_p (insn))
561 || !flow_active_insn_p (insn));
562}
563
564/* Likewise, but protect loop latches, headers and preheaders. */
565/* FIXME: Make this a cfg hook. */
566
567bool
568forwarder_block_p (const_basic_block bb)
569{
570 if (!contains_no_active_insn_p (bb))
571 return false;
572
4a020a8c 573 /* Protect loop latches, headers and preheaders. */
574 if (current_loops)
575 {
576 basic_block dest;
577 if (bb->loop_father->header == bb)
578 return false;
579 dest = EDGE_SUCC (bb, 0)->dest;
580 if (dest->loop_father->header == dest)
581 return false;
582 }
583
bbdfcf34 584 return true;
4a020a8c 585}
586
587/* Return nonzero if we can reach target from src by falling through. */
588/* FIXME: Make this a cfg hook. */
589
590bool
591can_fallthru (basic_block src, basic_block target)
592{
593 rtx insn = BB_END (src);
594 rtx insn2;
595 edge e;
596 edge_iterator ei;
597
598 if (target == EXIT_BLOCK_PTR)
599 return true;
600 if (src->next_bb != target)
601 return 0;
602 FOR_EACH_EDGE (e, ei, src->succs)
603 if (e->dest == EXIT_BLOCK_PTR
604 && e->flags & EDGE_FALLTHRU)
605 return 0;
606
607 insn2 = BB_HEAD (target);
608 if (insn2 && !active_insn_p (insn2))
609 insn2 = next_active_insn (insn2);
610
611 /* ??? Later we may add code to move jump tables offline. */
612 return next_active_insn (insn) == insn2;
613}
614
615/* Return nonzero if we could reach target from src by falling through,
616 if the target was made adjacent. If we already have a fall-through
617 edge to the exit block, we can't do that. */
618static bool
619could_fall_through (basic_block src, basic_block target)
620{
621 edge e;
622 edge_iterator ei;
623
624 if (target == EXIT_BLOCK_PTR)
625 return true;
626 FOR_EACH_EDGE (e, ei, src->succs)
627 if (e->dest == EXIT_BLOCK_PTR
628 && e->flags & EDGE_FALLTHRU)
629 return 0;
630 return true;
631}
b36d64df 632\f
018f0595 633/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
634rtx
635bb_note (basic_block bb)
636{
637 rtx note;
638
639 note = BB_HEAD (bb);
640 if (LABEL_P (note))
641 note = NEXT_INSN (note);
642
643 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
644 return note;
645}
646
3072d30e 647/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
648 note associated with the BLOCK. */
649
650static rtx
651first_insn_after_basic_block_note (basic_block block)
652{
653 rtx insn;
654
655 /* Get the first instruction in the block. */
656 insn = BB_HEAD (block);
657
658 if (insn == NULL_RTX)
659 return NULL_RTX;
660 if (LABEL_P (insn))
661 insn = NEXT_INSN (insn);
662 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
663
664 return NEXT_INSN (insn);
665}
666
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  /* If no split point was given, split right after the block note,
     i.e. move the whole body of BB into the new block.  */
  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
728
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  /* An unknown locus is never unique.  */
  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  /* END is the sentinel just before A's head.  */
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  /* If the last located insn in A already carries this locus, the
     edge's copy is not unique.  */
  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      /* Likewise if the first located insn in B carries it.  */
      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
765
766/* If the single edge between blocks A and B is the only place in RTL which
767 holds some unique locus, emit a nop with that locus between the blocks. */
768
769static void
770emit_nop_for_unique_locus_between (basic_block a, basic_block b)
771{
772 if (!unique_locus_on_edge_between_p (a, b))
773 return;
774
775 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5169661d 776 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
8ddad41d 777}
778
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  /* Range of trailing debug insns of B, handled separately below.  */
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Back B_END up over any trailing debug insns; B_DEBUG_START ends up
     at the first of them.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      /* Find the insn just before the jump, skipping notes other than
	 the basic-block note.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
c60fa3a7 900
3072d30e 901
c60fa3a7 902/* Return true when block A and B can be merged. */
5493cb9a 903
47aaf6e6 904static bool
905rtl_can_merge_blocks (basic_block a, basic_block b)
c60fa3a7 906{
4f18499c 907 /* If we are partitioning hot/cold basic blocks, we don't want to
908 mess up unconditional or indirect jumps that cross between hot
7562ed74 909 and cold sections.
910
1118aef7 911 Basic block partitioning may result in some jumps that appear to
a0c938f0 912 be optimizable (or blocks that appear to be mergeable), but which really
913 must be left untouched (they are required to make it safely across
914 partition boundaries). See the comments at the top of
1118aef7 915 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
7562ed74 916
1897b881 917 if (BB_PARTITION (a) != BB_PARTITION (b))
7562ed74 918 return false;
4f18499c 919
79f958cb 920 /* Protect the loop latches. */
921 if (current_loops && b->loop_father->latch == b)
922 return false;
923
c60fa3a7 924 /* There must be exactly one edge in between the blocks. */
ea091dfd 925 return (single_succ_p (a)
926 && single_succ (a) == b
927 && single_pred_p (b)
cd665a06 928 && a != b
c60fa3a7 929 /* Must be simple edge. */
ea091dfd 930 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
c60fa3a7 931 && a->next_bb == b
932 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
933 /* If the jump insn has side effects,
934 we can't kill the edge. */
6d7dc5b9 935 && (!JUMP_P (BB_END (a))
18f811c9 936 || (reload_completed
5496dbfc 937 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
c60fa3a7 938}
b36d64df 939\f
5cc577b6 940/* Return the label in the head of basic block BLOCK. Create one if it doesn't
941 exist. */
b36d64df 942
943rtx
4c9e08a4 944block_label (basic_block block)
b36d64df 945{
946 if (block == EXIT_BLOCK_PTR)
947 return NULL_RTX;
5cc577b6 948
6d7dc5b9 949 if (!LABEL_P (BB_HEAD (block)))
b36d64df 950 {
5496dbfc 951 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
b36d64df 952 }
5cc577b6 953
5496dbfc 954 return BB_HEAD (block);
b36d64df 955}
956
957/* Attempt to perform edge redirection by replacing possibly complex jump
5cc577b6 958 instruction by unconditional jump or removing jump completely. This can
959 apply only if all edges now point to the same block. The parameters and
960 return values are equivalent to redirect_edge_and_branch. */
b36d64df 961
4ee9c684 962edge
c60fa3a7 963try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
b36d64df 964{
965 basic_block src = e->src;
5496dbfc 966 rtx insn = BB_END (src), kill_from;
b19beda9 967 rtx set;
b36d64df 968 int fallthru = 0;
4f18499c 969
970 /* If we are partitioning hot/cold basic blocks, we don't want to
971 mess up unconditional or indirect jumps that cross between hot
1118aef7 972 and cold sections.
973
974 Basic block partitioning may result in some jumps that appear to
a0c938f0 975 be optimizable (or blocks that appear to be mergeable), but which really
976 must be left untouched (they are required to make it safely across
977 partition boundaries). See the comments at the top of
1118aef7 978 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
a0c938f0 979
1897b881 980 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
981 || BB_PARTITION (src) != BB_PARTITION (target))
53f8294f 982 return NULL;
4f18499c 983
34f05bfa 984 /* We can replace or remove a complex jump only when we have exactly
985 two edges. Also, if we have exactly one outgoing edge, we can
986 redirect that. */
987 if (EDGE_COUNT (src->succs) >= 3
988 /* Verify that all targets will be TARGET. Specifically, the
989 edge that is not E must also go to TARGET. */
990 || (EDGE_COUNT (src->succs) == 2
991 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
992 return NULL;
5cc577b6 993
34f05bfa 994 if (!onlyjump_p (insn))
4ee9c684 995 return NULL;
e5562ab8 996 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
4ee9c684 997 return NULL;
b36d64df 998
999 /* Avoid removing branch with side effects. */
1000 set = single_set (insn);
1001 if (!set || side_effects_p (set))
4ee9c684 1002 return NULL;
b36d64df 1003
1004 /* In case we zap a conditional jump, we'll need to kill
1005 the cc0 setter too. */
1006 kill_from = insn;
1007#ifdef HAVE_cc0
3d5a3e76 1008 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1009 && only_sets_cc0_p (PREV_INSN (insn)))
b36d64df 1010 kill_from = PREV_INSN (insn);
1011#endif
1012
1013 /* See if we can create the fallthru edge. */
c60fa3a7 1014 if (in_cfglayout || can_fallthru (src, target))
b36d64df 1015 {
450d042a 1016 if (dump_file)
1017 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
b36d64df 1018 fallthru = 1;
1019
4a82352a 1020 /* Selectively unlink whole insn chain. */
c60fa3a7 1021 if (in_cfglayout)
1022 {
43e94e51 1023 rtx insn = BB_FOOTER (src);
c60fa3a7 1024
39257eb5 1025 delete_insn_chain (kill_from, BB_END (src), false);
c60fa3a7 1026
1027 /* Remove barriers but keep jumptables. */
1028 while (insn)
1029 {
6d7dc5b9 1030 if (BARRIER_P (insn))
c60fa3a7 1031 {
1032 if (PREV_INSN (insn))
1033 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1034 else
43e94e51 1035 BB_FOOTER (src) = NEXT_INSN (insn);
c60fa3a7 1036 if (NEXT_INSN (insn))
1037 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1038 }
6d7dc5b9 1039 if (LABEL_P (insn))
c60fa3a7 1040 break;
1041 insn = NEXT_INSN (insn);
1042 }
1043 }
1044 else
39257eb5 1045 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1046 false);
b36d64df 1047 }
5cc577b6 1048
b36d64df 1049 /* If this already is simplejump, redirect it. */
1050 else if (simplejump_p (insn))
1051 {
1052 if (e->dest == target)
4ee9c684 1053 return NULL;
450d042a 1054 if (dump_file)
1055 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
b3d6de89 1056 INSN_UID (insn), e->dest->index, target->index);
8963581a 1057 if (!redirect_jump (insn, block_label (target), 0))
1058 {
cc636d56 1059 gcc_assert (target == EXIT_BLOCK_PTR);
1060 return NULL;
8963581a 1061 }
b36d64df 1062 }
5cc577b6 1063
8963581a 1064 /* Cannot do anything for target exit block. */
1065 else if (target == EXIT_BLOCK_PTR)
4ee9c684 1066 return NULL;
8963581a 1067
b36d64df 1068 /* Or replace possibly complicated jump insn by simple jump insn. */
1069 else
1070 {
1071 rtx target_label = block_label (target);
669f9131 1072 rtx barrier, label, table;
b36d64df 1073
0891f67c 1074 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
5496dbfc 1075 JUMP_LABEL (BB_END (src)) = target_label;
b36d64df 1076 LABEL_NUSES (target_label)++;
450d042a 1077 if (dump_file)
1078 fprintf (dump_file, "Replacing insn %i by jump %i\n",
5496dbfc 1079 INSN_UID (insn), INSN_UID (BB_END (src)));
b36d64df 1080
1ed8ccdb 1081
201f6961 1082 delete_insn_chain (kill_from, insn, false);
1083
1ed8ccdb 1084 /* Recognize a tablejump that we are converting to a
1085 simple jump and remove its associated CODE_LABEL
1086 and ADDR_VEC or ADDR_DIFF_VEC. */
201f6961 1087 if (tablejump_p (insn, &label, &table))
1088 delete_insn_chain (label, table, false);
669f9131 1089
5496dbfc 1090 barrier = next_nonnote_insn (BB_END (src));
6d7dc5b9 1091 if (!barrier || !BARRIER_P (barrier))
5496dbfc 1092 emit_barrier_after (BB_END (src));
1824678b 1093 else
1094 {
5496dbfc 1095 if (barrier != NEXT_INSN (BB_END (src)))
1824678b 1096 {
1097 /* Move the jump before barrier so that the notes
1098 which originally were or were created before jump table are
1099 inside the basic block. */
5496dbfc 1100 rtx new_insn = BB_END (src);
1824678b 1101
f41d4683 1102 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1103 PREV_INSN (barrier), src);
1824678b 1104
1105 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1106 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1107
1108 NEXT_INSN (new_insn) = barrier;
1109 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1110
1111 PREV_INSN (new_insn) = PREV_INSN (barrier);
1112 PREV_INSN (barrier) = new_insn;
1113 }
1114 }
b36d64df 1115 }
1116
1117 /* Keep only one edge out and set proper flags. */
ea091dfd 1118 if (!single_succ_p (src))
cd665a06 1119 remove_edge (e);
ea091dfd 1120 gcc_assert (single_succ_p (src));
cd665a06 1121
ea091dfd 1122 e = single_succ_edge (src);
b36d64df 1123 if (fallthru)
1124 e->flags = EDGE_FALLTHRU;
1125 else
1126 e->flags = 0;
5cc577b6 1127
b36d64df 1128 e->probability = REG_BR_PROB_BASE;
1129 e->count = src->count;
1130
b36d64df 1131 if (e->dest != target)
1132 redirect_edge_succ (e, target);
4ee9c684 1133 return e;
b36d64df 1134}
1135
a8dd994c 1136/* Subroutine of redirect_branch_edge that tries to patch the jump
1137 instruction INSN so that it reaches block NEW. Do this
1138 only when it originally reached block OLD. Return true if this
1139 worked or the original target wasn't OLD, return false if redirection
1140 doesn't work. */
1141
1142static bool
1143patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
b36d64df 1144{
1145 rtx tmp;
b36d64df 1146 /* Recognize a tablejump and adjust all matching cases. */
b19beda9 1147 if (tablejump_p (insn, NULL, &tmp))
b36d64df 1148 {
1149 rtvec vec;
1150 int j;
a8dd994c 1151 rtx new_label = block_label (new_bb);
b36d64df 1152
a8dd994c 1153 if (new_bb == EXIT_BLOCK_PTR)
1154 return false;
b36d64df 1155 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1156 vec = XVEC (PATTERN (tmp), 0);
1157 else
1158 vec = XVEC (PATTERN (tmp), 1);
1159
1160 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1161 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1162 {
1163 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1164 --LABEL_NUSES (old_label);
1165 ++LABEL_NUSES (new_label);
1166 }
1167
2358393e 1168 /* Handle casesi dispatch insns. */
b36d64df 1169 if ((tmp = single_set (insn)) != NULL
1170 && SET_DEST (tmp) == pc_rtx
1171 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1172 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1173 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1174 {
514b43f8 1175 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
b36d64df 1176 new_label);
1177 --LABEL_NUSES (old_label);
1178 ++LABEL_NUSES (new_label);
1179 }
1180 }
78f55ca8 1181 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1182 {
1183 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1184 rtx new_label, note;
1185
1186 if (new_bb == EXIT_BLOCK_PTR)
1187 return false;
1188 new_label = block_label (new_bb);
1189
1190 for (i = 0; i < n; ++i)
1191 {
1192 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1193 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1194 if (XEXP (old_ref, 0) == old_label)
1195 {
1196 ASM_OPERANDS_LABEL (tmp, i)
1197 = gen_rtx_LABEL_REF (Pmode, new_label);
1198 --LABEL_NUSES (old_label);
1199 ++LABEL_NUSES (new_label);
1200 }
1201 }
1202
1203 if (JUMP_LABEL (insn) == old_label)
1204 {
1205 JUMP_LABEL (insn) = new_label;
1206 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1207 if (note)
1208 remove_note (insn, note);
1209 }
1210 else
1211 {
1212 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1213 if (note)
1214 remove_note (insn, note);
1215 if (JUMP_LABEL (insn) != new_label
1216 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1217 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1218 }
dcb172b4 1219 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1220 != NULL_RTX)
1221 XEXP (note, 0) = new_label;
78f55ca8 1222 }
b36d64df 1223 else
1224 {
1225 /* ?? We may play the games with moving the named labels from
1226 one basic block to the other in case only one computed_jump is
1227 available. */
5cc577b6 1228 if (computed_jump_p (insn)
1229 /* A return instruction can't be redirected. */
1230 || returnjump_p (insn))
a8dd994c 1231 return false;
8963581a 1232
a8dd994c 1233 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
8963581a 1234 {
a8dd994c 1235 /* If the insn doesn't go where we think, we're confused. */
1236 gcc_assert (JUMP_LABEL (insn) == old_label);
1237
1238 /* If the substitution doesn't succeed, die. This can happen
1239 if the back end emitted unrecognizable instructions or if
1240 target is exit block on some arches. */
1241 if (!redirect_jump (insn, block_label (new_bb), 0))
1242 {
1243 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1244 return false;
1245 }
8963581a 1246 }
b36d64df 1247 }
a8dd994c 1248 return true;
1249}
1250
1251
1252/* Redirect edge representing branch of (un)conditional jump or tablejump,
1253 NULL on failure */
1254static edge
1255redirect_branch_edge (edge e, basic_block target)
1256{
1257 rtx old_label = BB_HEAD (e->dest);
1258 basic_block src = e->src;
1259 rtx insn = BB_END (src);
1260
1261 /* We can only redirect non-fallthru edges of jump insn. */
1262 if (e->flags & EDGE_FALLTHRU)
1263 return NULL;
1264 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1265 return NULL;
1266
1267 if (!currently_expanding_to_rtl)
1268 {
1269 if (!patch_jump_insn (insn, old_label, target))
1270 return NULL;
1271 }
1272 else
1273 /* When expanding this BB might actually contain multiple
1274 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1275 Redirect all of those that match our label. */
50b15e04 1276 FOR_BB_INSNS (src, insn)
a8dd994c 1277 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1278 return NULL;
b36d64df 1279
450d042a 1280 if (dump_file)
1281 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
b3d6de89 1282 e->src->index, e->dest->index, target->index);
5cc577b6 1283
b36d64df 1284 if (e->dest != target)
4ee9c684 1285 e = redirect_edge_succ_nodup (e, target);
3072d30e 1286
4ee9c684 1287 return e;
c60fa3a7 1288}
1289
1290/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1291 expense of adding new instructions or reordering basic blocks.
1292
1293 Function can be also called with edge destination equivalent to the TARGET.
1294 Then it should try the simplifications and do nothing if none is possible.
1295
4ee9c684 1296 Return edge representing the branch if transformation succeeded. Return NULL
1297 on failure.
1298 We still return NULL in case E already destinated TARGET and we didn't
1299 managed to simplify instruction stream. */
c60fa3a7 1300
4ee9c684 1301static edge
f82b06e0 1302rtl_redirect_edge_and_branch (edge e, basic_block target)
c60fa3a7 1303{
4ee9c684 1304 edge ret;
adee86a0 1305 basic_block src = e->src;
1306
c60fa3a7 1307 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4ee9c684 1308 return NULL;
c60fa3a7 1309
e5562ab8 1310 if (e->dest == target)
4ee9c684 1311 return e;
e5562ab8 1312
4ee9c684 1313 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
adee86a0 1314 {
3072d30e 1315 df_set_bb_dirty (src);
4ee9c684 1316 return ret;
adee86a0 1317 }
c60fa3a7 1318
4ee9c684 1319 ret = redirect_branch_edge (e, target);
1320 if (!ret)
1321 return NULL;
5cc577b6 1322
3072d30e 1323 df_set_bb_dirty (src);
4ee9c684 1324 return ret;
b36d64df 1325}
1326
20dd417a 1327/* Like force_nonfallthru below, but additionally performs redirection
9cb2517e 1328 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1329 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1330 simple_return_rtx, indicating which kind of returnjump to create.
1331 It should be NULL otherwise. */
b36d64df 1332
4115ac36 1333basic_block
9cb2517e 1334force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
b36d64df 1335{
498d173d 1336 basic_block jump_block, new_bb = NULL, src = e->src;
b36d64df 1337 rtx note;
1338 edge new_edge;
498d173d 1339 int abnormal_edge_flags = 0;
d7ff3ab6 1340 bool asm_goto_edge = false;
9c388755 1341 int loc;
b36d64df 1342
accd6c44 1343 /* In the case the last instruction is conditional jump to the next
1344 instruction, first redirect the jump itself and then continue
df07c3ae 1345 by creating a basic block afterwards to redirect fallthru edge. */
accd6c44 1346 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
5496dbfc 1347 && any_condjump_p (BB_END (e->src))
5496dbfc 1348 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
accd6c44 1349 {
1350 rtx note;
d9aa52be 1351 edge b = unchecked_make_edge (e->src, target, 0);
cc636d56 1352 bool redirected;
accd6c44 1353
cc636d56 1354 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1355 gcc_assert (redirected);
a0c938f0 1356
5496dbfc 1357 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
accd6c44 1358 if (note)
1359 {
1360 int prob = INTVAL (XEXP (note, 0));
1361
1362 b->probability = prob;
1363 b->count = e->count * prob / REG_BR_PROB_BASE;
1364 e->probability -= e->probability;
1365 e->count -= b->count;
1366 if (e->probability < 0)
1367 e->probability = 0;
1368 if (e->count < 0)
1369 e->count = 0;
1370 }
1371 }
1372
b36d64df 1373 if (e->flags & EDGE_ABNORMAL)
498d173d 1374 {
1375 /* Irritating special case - fallthru edge to the same block as abnormal
1376 edge.
1377 We can't redirect abnormal edge, but we still can split the fallthru
4c9e08a4 1378 one and create separate abnormal edge to original destination.
498d173d 1379 This allows bb-reorder to make such edge non-fallthru. */
cc636d56 1380 gcc_assert (e->dest == target);
4ecbba1a 1381 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1382 e->flags &= EDGE_FALLTHRU;
498d173d 1383 }
cc636d56 1384 else
76cc9616 1385 {
cc636d56 1386 gcc_assert (e->flags & EDGE_FALLTHRU);
1387 if (e->src == ENTRY_BLOCK_PTR)
1388 {
1389 /* We can't redirect the entry block. Create an empty block
cd665a06 1390 at the start of the function which we use to add the new
1391 jump. */
1392 edge tmp;
1393 edge_iterator ei;
1394 bool found = false;
a0c938f0 1395
cd665a06 1396 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
a0c938f0 1397
cc636d56 1398 /* Change the existing edge's source to be the new block, and add
1399 a new edge from the entry block to the new block. */
1400 e->src = bb;
cd665a06 1401 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1402 {
1403 if (tmp == e)
1404 {
f1f41a6c 1405 ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
cd665a06 1406 found = true;
1407 break;
1408 }
1409 else
1410 ei_next (&ei);
1411 }
a0c938f0 1412
cd665a06 1413 gcc_assert (found);
a0c938f0 1414
f1f41a6c 1415 vec_safe_push (bb->succs, e);
cc636d56 1416 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1417 }
76cc9616 1418 }
1419
d7ff3ab6 1420 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
92f67f6e 1421 don't point to the target or fallthru label. */
d7ff3ab6 1422 if (JUMP_P (BB_END (e->src))
1423 && target != EXIT_BLOCK_PTR
d7ff3ab6 1424 && (e->flags & EDGE_FALLTHRU)
1425 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
b36d64df 1426 {
d7ff3ab6 1427 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
ca632883 1428 bool adjust_jump_target = false;
d7ff3ab6 1429
1430 for (i = 0; i < n; ++i)
92f67f6e 1431 {
1432 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
ca632883 1433 {
1434 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1435 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1436 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1437 adjust_jump_target = true;
1438 }
92f67f6e 1439 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
d7ff3ab6 1440 asm_goto_edge = true;
92f67f6e 1441 }
ca632883 1442 if (adjust_jump_target)
1443 {
1444 rtx insn = BB_END (e->src), note;
1445 rtx old_label = BB_HEAD (e->dest);
1446 rtx new_label = BB_HEAD (target);
1447
1448 if (JUMP_LABEL (insn) == old_label)
1449 {
1450 JUMP_LABEL (insn) = new_label;
1451 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1452 if (note)
1453 remove_note (insn, note);
1454 }
1455 else
1456 {
1457 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1458 if (note)
1459 remove_note (insn, note);
1460 if (JUMP_LABEL (insn) != new_label
1461 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1462 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1463 }
1464 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1465 != NULL_RTX)
1466 XEXP (note, 0) = new_label;
1467 }
d7ff3ab6 1468 }
1469
1470 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1471 {
1472 gcov_type count = e->count;
1473 int probability = e->probability;
b36d64df 1474 /* Create the new structures. */
3160014e 1475
7a1fee99 1476 /* If the old block ended with a tablejump, skip its table
1477 by searching forward from there. Otherwise start searching
1478 forward from the last instruction of the old block. */
5496dbfc 1479 if (!tablejump_p (BB_END (e->src), NULL, &note))
1480 note = BB_END (e->src);
3160014e 1481 note = NEXT_INSN (note);
1482
3160014e 1483 jump_block = create_basic_block (note, NULL, e->src);
d7ff3ab6 1484 jump_block->count = count;
b36d64df 1485 jump_block->frequency = EDGE_FREQUENCY (e);
b36d64df 1486
4f18499c 1487 /* Make sure new block ends up in correct hot/cold section. */
1488
7562ed74 1489 BB_COPY_PARTITION (jump_block, e->src);
065efcb1 1490 if (flag_reorder_blocks_and_partition
218e3e4e 1491 && targetm_common.have_named_sections
1897b881 1492 && JUMP_P (BB_END (jump_block))
1493 && !any_condjump_p (BB_END (jump_block))
1494 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
a1ddb869 1495 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
a0c938f0 1496
b36d64df 1497 /* Wire edge in. */
1498 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
d7ff3ab6 1499 new_edge->probability = probability;
1500 new_edge->count = count;
b36d64df 1501
1502 /* Redirect old edge. */
1503 redirect_edge_pred (e, jump_block);
1504 e->probability = REG_BR_PROB_BASE;
1505
d7ff3ab6 1506 /* If asm goto has any label refs to target's label,
1507 add also edge from asm goto bb to target. */
1508 if (asm_goto_edge)
1509 {
1510 new_edge->probability /= 2;
1511 new_edge->count /= 2;
1512 jump_block->count /= 2;
1513 jump_block->frequency /= 2;
1514 new_edge = make_edge (new_edge->src, target,
1515 e->flags & ~EDGE_FALLTHRU);
1516 new_edge->probability = probability - probability / 2;
1517 new_edge->count = count - count / 2;
1518 }
1519
b36d64df 1520 new_bb = jump_block;
1521 }
1522 else
1523 jump_block = e->src;
5cc577b6 1524
8e7408e3 1525 loc = e->goto_locus;
b36d64df 1526 e->flags &= ~EDGE_FALLTHRU;
1527 if (target == EXIT_BLOCK_PTR)
1528 {
9cb2517e 1529 if (jump_label == ret_rtx)
1530 {
38c95e2f 1531#ifdef HAVE_return
9cb2517e 1532 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
38c95e2f 1533#else
9cb2517e 1534 gcc_unreachable ();
38c95e2f 1535#endif
9cb2517e 1536 }
1537 else
1538 {
1539 gcc_assert (jump_label == simple_return_rtx);
1540#ifdef HAVE_simple_return
1541 emit_jump_insn_after_setloc (gen_simple_return (),
1542 BB_END (jump_block), loc);
1543#else
1544 gcc_unreachable ();
1545#endif
1546 }
31a53363 1547 set_return_jump_label (BB_END (jump_block));
b36d64df 1548 }
1549 else
1550 {
1551 rtx label = block_label (target);
9c388755 1552 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
5496dbfc 1553 JUMP_LABEL (BB_END (jump_block)) = label;
b36d64df 1554 LABEL_NUSES (label)++;
1555 }
5cc577b6 1556
5496dbfc 1557 emit_barrier_after (BB_END (jump_block));
b36d64df 1558 redirect_edge_succ_nodup (e, target);
1559
498d173d 1560 if (abnormal_edge_flags)
1561 make_edge (src, target, abnormal_edge_flags);
1562
48e1416a 1563 df_mark_solutions_dirty ();
b36d64df 1564 return new_bb;
1565}
1566
1567/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1568 (and possibly create new basic block) to make edge non-fallthru.
1569 Return newly created BB or NULL if none. */
5cc577b6 1570
202bbc06 1571static basic_block
1572rtl_force_nonfallthru (edge e)
b36d64df 1573{
9cb2517e 1574 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
b36d64df 1575}
1576
1577/* Redirect edge even at the expense of creating new jump insn or
1578 basic block. Return new basic block if created, NULL otherwise.
a53ff4c1 1579 Conversion must be possible. */
b36d64df 1580
1026363d 1581static basic_block
4c9e08a4 1582rtl_redirect_edge_and_branch_force (edge e, basic_block target)
b36d64df 1583{
5cc577b6 1584 if (redirect_edge_and_branch (e, target)
1585 || e->dest == target)
b36d64df 1586 return NULL;
1587
1588 /* In case the edge redirection failed, try to force it to be non-fallthru
1589 and redirect newly created simplejump. */
3072d30e 1590 df_set_bb_dirty (e->src);
9cb2517e 1591 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
b36d64df 1592}
1593
1594/* The given edge should potentially be a fallthru edge. If that is in
1595 fact true, delete the jump and barriers that are in the way. */
1596
5f5d4cd1 1597static void
1598rtl_tidy_fallthru_edge (edge e)
b36d64df 1599{
1600 rtx q;
5f5d4cd1 1601 basic_block b = e->src, c = b->next_bb;
1602
b36d64df 1603 /* ??? In a late-running flow pass, other folks may have deleted basic
1604 blocks by nopping out blocks, leaving multiple BARRIERs between here
442e3cb9 1605 and the target label. They ought to be chastised and fixed.
b36d64df 1606
1607 We can also wind up with a sequence of undeletable labels between
1608 one block and the next.
1609
1610 So search through a sequence of barriers, labels, and notes for
1611 the head of block C and assert that we really do fall through. */
1612
5496dbfc 1613 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
ac25a327 1614 if (INSN_P (q))
1615 return;
b36d64df 1616
1617 /* Remove what will soon cease being the jump insn from the source block.
1618 If block B consisted only of this single jump, turn it into a deleted
1619 note. */
5496dbfc 1620 q = BB_END (b);
6d7dc5b9 1621 if (JUMP_P (q)
b36d64df 1622 && onlyjump_p (q)
1623 && (any_uncondjump_p (q)
ea091dfd 1624 || single_succ_p (b)))
b36d64df 1625 {
1626#ifdef HAVE_cc0
1627 /* If this was a conditional jump, we need to also delete
1628 the insn that set cc0. */
1629 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1630 q = PREV_INSN (q);
1631#endif
1632
1633 q = PREV_INSN (q);
b36d64df 1634 }
1635
1636 /* Selectively unlink the sequence. */
5496dbfc 1637 if (q != PREV_INSN (BB_HEAD (c)))
39257eb5 1638 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
b36d64df 1639
1640 e->flags |= EDGE_FALLTHRU;
1641}
b36d64df 1642\f
5f5d4cd1 1643/* Should move basic block BB after basic block AFTER. NIY. */
1644
1645static bool
1646rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1647 basic_block after ATTRIBUTE_UNUSED)
1648{
1649 return false;
1650}
1651
b36d64df 1652/* Split a (typically critical) edge. Return the new block.
a53ff4c1 1653 The edge must not be abnormal.
b36d64df 1654
1655 ??? The code generally expects to be called on critical edges.
1656 The case of a block ending in an unconditional jump to a
1657 block with multiple predecessors is not handled optimally. */
1658
9140e457 1659static basic_block
4c9e08a4 1660rtl_split_edge (edge edge_in)
b36d64df 1661{
1662 basic_block bb;
b36d64df 1663 rtx before;
1664
1665 /* Abnormal edges cannot be split. */
cc636d56 1666 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
b36d64df 1667
1668 /* We are going to place the new block in front of edge destination.
4a82352a 1669 Avoid existence of fallthru predecessors. */
b36d64df 1670 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1671 {
7f58c05e 1672 edge e = find_fallthru_edge (edge_in->dest->preds);
b36d64df 1673
1674 if (e)
1675 force_nonfallthru (e);
1676 }
1677
588aadb7 1678 /* Create the basic block note. */
1679 if (edge_in->dest != EXIT_BLOCK_PTR)
5496dbfc 1680 before = BB_HEAD (edge_in->dest);
b36d64df 1681 else
1682 before = NULL_RTX;
1683
9bb8a4af 1684 /* If this is a fall through edge to the exit block, the blocks might be
0a55d497 1685 not adjacent, and the right place is after the source. */
1686 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
9bb8a4af 1687 {
1688 before = NEXT_INSN (BB_END (edge_in->src));
9bb8a4af 1689 bb = create_basic_block (before, NULL, edge_in->src);
7562ed74 1690 BB_COPY_PARTITION (bb, edge_in->src);
9bb8a4af 1691 }
1692 else
065efcb1 1693 {
1694 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
7562ed74 1695 /* ??? Why not edge_in->dest->prev_bb here? */
1696 BB_COPY_PARTITION (bb, edge_in->dest);
065efcb1 1697 }
b36d64df 1698
805e22b2 1699 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
b36d64df 1700
917bbcab 1701 /* For non-fallthru edges, we must adjust the predecessor's
b36d64df 1702 jump instruction to target our new block. */
1703 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1704 {
cc636d56 1705 edge redirected = redirect_edge_and_branch (edge_in, bb);
1706 gcc_assert (redirected);
b36d64df 1707 }
1708 else
dcb172b4 1709 {
1710 if (edge_in->src != ENTRY_BLOCK_PTR)
1711 {
1712 /* For asm goto even splitting of fallthru edge might
1713 need insn patching, as other labels might point to the
1714 old label. */
1715 rtx last = BB_END (edge_in->src);
1716 if (last
1717 && JUMP_P (last)
1718 && edge_in->dest != EXIT_BLOCK_PTR
1719 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1720 && patch_jump_insn (last, before, bb))
1721 df_set_bb_dirty (edge_in->src);
1722 }
1723 redirect_edge_succ (edge_in, bb);
1724 }
b36d64df 1725
1726 return bb;
1727}
1728
1729/* Queue instructions for insertion on an edge between two basic blocks.
1730 The new instructions and basic blocks (if any) will not appear in the
1731 CFG until commit_edge_insertions is called. */
1732
1733void
4c9e08a4 1734insert_insn_on_edge (rtx pattern, edge e)
b36d64df 1735{
1736 /* We cannot insert instructions on an abnormal critical edge.
1737 It will be easier to find the culprit if we die now. */
cc636d56 1738 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
b36d64df 1739
4ee9c684 1740 if (e->insns.r == NULL_RTX)
b36d64df 1741 start_sequence ();
1742 else
4ee9c684 1743 push_to_sequence (e->insns.r);
b36d64df 1744
1745 emit_insn (pattern);
1746
4ee9c684 1747 e->insns.r = get_insns ();
b36d64df 1748 end_sequence ();
1749}
1750
1751/* Update the CFG for the instructions queued on edge E. */
1752
a8dd994c 1753void
ca462ec0 1754commit_one_edge_insertion (edge e)
b36d64df 1755{
1756 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
53105d88 1757 basic_block bb;
b36d64df 1758
1759 /* Pull the insns off the edge now since the edge might go away. */
4ee9c684 1760 insns = e->insns.r;
1761 e->insns.r = NULL_RTX;
b36d64df 1762
53105d88 1763 /* Figure out where to put these insns. If the destination has
1764 one predecessor, insert there. Except for the exit block. */
1765 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
b36d64df 1766 {
53105d88 1767 bb = e->dest;
1768
1769 /* Get the location correct wrt a code label, and "nice" wrt
1770 a basic block note, and before everything else. */
1771 tmp = BB_HEAD (bb);
1772 if (LABEL_P (tmp))
1773 tmp = NEXT_INSN (tmp);
1774 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1775 tmp = NEXT_INSN (tmp);
1776 if (tmp == BB_HEAD (bb))
1777 before = tmp;
1778 else if (tmp)
1779 after = PREV_INSN (tmp);
1780 else
1781 after = get_last_insn ();
1782 }
fb20d6fa 1783
53105d88 1784 /* If the source has one successor and the edge is not abnormal,
1785 insert there. Except for the entry block. */
1786 else if ((e->flags & EDGE_ABNORMAL) == 0
1787 && single_succ_p (e->src)
1788 && e->src != ENTRY_BLOCK_PTR)
1789 {
1790 bb = e->src;
fb20d6fa 1791
53105d88 1792 /* It is possible to have a non-simple jump here. Consider a target
1793 where some forms of unconditional jumps clobber a register. This
1794 happens on the fr30 for example.
b36d64df 1795
53105d88 1796 We know this block has a single successor, so we can just emit
1797 the queued insns before the jump. */
1798 if (JUMP_P (BB_END (bb)))
1799 before = BB_END (bb);
fb20d6fa 1800 else
1801 {
53105d88 1802 /* We'd better be fallthru, or we've lost track of what's what. */
1803 gcc_assert (e->flags & EDGE_FALLTHRU);
4f18499c 1804
53105d88 1805 after = BB_END (bb);
b36d64df 1806 }
1807 }
1808
53105d88 1809 /* Otherwise we must split the edge. */
1810 else
1811 {
1812 bb = split_edge (e);
1813 after = BB_END (bb);
b36d64df 1814
53105d88 1815 if (flag_reorder_blocks_and_partition
218e3e4e 1816 && targetm_common.have_named_sections
53105d88 1817 && e->src != ENTRY_BLOCK_PTR
1818 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1819 && !(e->flags & EDGE_CROSSING)
1820 && JUMP_P (after)
1821 && !any_condjump_p (after)
1822 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1823 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1824 }
1825
1826 /* Now that we've found the spot, do the insertion. */
b36d64df 1827 if (before)
1828 {
3072d30e 1829 emit_insn_before_noloc (insns, before, bb);
b36d64df 1830 last = prev_nonnote_insn (before);
1831 }
1832 else
3072d30e 1833 last = emit_insn_after_noloc (insns, after, bb);
b36d64df 1834
1835 if (returnjump_p (last))
1836 {
1837 /* ??? Remove all outgoing edges from BB and add one for EXIT.
a0c938f0 1838 This is not currently a problem because this only happens
1839 for the (single) epilogue, which already has a fallthru edge
1840 to EXIT. */
b36d64df 1841
ea091dfd 1842 e = single_succ_edge (bb);
cc636d56 1843 gcc_assert (e->dest == EXIT_BLOCK_PTR
ea091dfd 1844 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
b36d64df 1845
5cc577b6 1846 e->flags &= ~EDGE_FALLTHRU;
b36d64df 1847 emit_barrier_after (last);
b3d6de89 1848
b36d64df 1849 if (before)
1850 delete_insn (before);
1851 }
cc636d56 1852 else
1853 gcc_assert (!JUMP_P (last));
b36d64df 1854}
1855
1856/* Update the CFG for all queued instructions. */
1857
1858void
4c9e08a4 1859commit_edge_insertions (void)
b36d64df 1860{
b36d64df 1861 basic_block bb;
1862
1863#ifdef ENABLE_CHECKING
1864 verify_flow_info ();
1865#endif
1866
4c26117a 1867 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
b36d64df 1868 {
cd665a06 1869 edge e;
1870 edge_iterator ei;
b36d64df 1871
cd665a06 1872 FOR_EACH_EDGE (e, ei, bb->succs)
1873 if (e->insns.r)
202bbc06 1874 commit_one_edge_insertion (e);
fb20d6fa 1875 }
b36d64df 1876}
1877\f
3072d30e 1878
5f5d4cd1 1879/* Print out RTL-specific basic block information (live information
5147ec07 1880 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1881 documented in dumpfile.h. */
b36d64df 1882
028f8cc7 1883static void
5147ec07 1884rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
b36d64df 1885{
1886 rtx insn;
1887 rtx last;
5f5d4cd1 1888 char *s_indent;
b36d64df 1889
f780cc25 1890 s_indent = (char *) alloca ((size_t) indent + 1);
4acc30e5 1891 memset (s_indent, ' ', (size_t) indent);
5f5d4cd1 1892 s_indent[indent] = '\0';
48e1416a 1893
5147ec07 1894 if (df && (flags & TDF_DETAILS))
3072d30e 1895 {
1896 df_dump_top (bb, outf);
1897 putc ('\n', outf);
1898 }
b36d64df 1899
4c2112a3 1900 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1901 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1902 insn = NEXT_INSN (insn))
5147ec07 1903 {
ea9538fb 1904 if (flags & TDF_DETAILS)
1905 df_dump_insn_top (insn, outf);
5147ec07 1906 if (! (flags & TDF_SLIM))
1907 print_rtl_single (outf, insn);
1908 else
1909 dump_insn_slim (outf, insn);
ea9538fb 1910 if (flags & TDF_DETAILS)
1911 df_dump_insn_bottom (insn, outf);
5147ec07 1912 }
1913
1914 if (df && (flags & TDF_DETAILS))
3072d30e 1915 {
1916 df_dump_bottom (bb, outf);
1917 putc ('\n', outf);
1918 }
1919
b36d64df 1920}
1921\f
bec2cf98 1922/* Like dump_function_to_file, but for RTL. Print out dataflow information
1923 for the start of each basic block. FLAGS are the TDF_* masks documented
1924 in dumpfile.h. */
b36d64df 1925
1926void
5147ec07 1927print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
b36d64df 1928{
1f1872fd 1929 const_rtx tmp_rtx;
b36d64df 1930 if (rtx_first == 0)
1931 fprintf (outf, "(nil)\n");
1932 else
1933 {
b36d64df 1934 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1935 int max_uid = get_max_uid ();
4c36ffe6 1936 basic_block *start = XCNEWVEC (basic_block, max_uid);
1937 basic_block *end = XCNEWVEC (basic_block, max_uid);
1938 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
4c26117a 1939 basic_block bb;
1940
bec2cf98 1941 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1942 insns, but the CFG is not maintained so the basic block info
1943 is not reliable. Therefore it's omitted from the dumps. */
1944 if (! (cfun->curr_properties & PROP_cfg))
1945 flags &= ~TDF_BLOCKS;
1946
3072d30e 1947 if (df)
1948 df_dump_start (outf);
1949
bec2cf98 1950 if (flags & TDF_BLOCKS)
b36d64df 1951 {
bec2cf98 1952 FOR_EACH_BB_REVERSE (bb)
b36d64df 1953 {
bec2cf98 1954 rtx x;
1955
1956 start[INSN_UID (BB_HEAD (bb))] = bb;
1957 end[INSN_UID (BB_END (bb))] = bb;
1958 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1959 {
1960 enum bb_state state = IN_MULTIPLE_BB;
5cc577b6 1961
bec2cf98 1962 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1963 state = IN_ONE_BB;
1964 in_bb_p[INSN_UID (x)] = state;
b36d64df 1965
bec2cf98 1966 if (x == BB_END (bb))
1967 break;
1968 }
b36d64df 1969 }
1970 }
1971
1972 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1973 {
bec2cf98 1974 if (flags & TDF_BLOCKS)
1975 {
1976 bb = start[INSN_UID (tmp_rtx)];
1977 if (bb != NULL)
1978 {
1979 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1980 if (df && (flags & TDF_DETAILS))
1981 df_dump_top (bb, outf);
1982 }
b36d64df 1983
bec2cf98 1984 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1985 && !NOTE_P (tmp_rtx)
1986 && !BARRIER_P (tmp_rtx))
1987 fprintf (outf, ";; Insn is not within a basic block\n");
1988 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1989 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1990 }
b36d64df 1991
ea9538fb 1992 if (flags & TDF_DETAILS)
1993 df_dump_insn_top (tmp_rtx, outf);
5147ec07 1994 if (! (flags & TDF_SLIM))
1995 print_rtl_single (outf, tmp_rtx);
1996 else
1997 dump_insn_slim (outf, tmp_rtx);
ea9538fb 1998 if (flags & TDF_DETAILS)
1999 df_dump_insn_bottom (tmp_rtx, outf);
b36d64df 2000
bec2cf98 2001 if (flags & TDF_BLOCKS)
2002 {
2003 bb = end[INSN_UID (tmp_rtx)];
2004 if (bb != NULL)
2005 {
2006 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2007 if (df && (flags & TDF_DETAILS))
2008 df_dump_bottom (bb, outf);
a30975d4 2009 putc ('\n', outf);
bec2cf98 2010 }
2011 }
b36d64df 2012 }
2013
2014 free (start);
2015 free (end);
2016 free (in_bb_p);
2017 }
b36d64df 2018}
2019\f
4a020a8c 2020/* Update the branch probability of BB if a REG_BR_PROB is present. */
2021
f884e43f 2022void
4c9e08a4 2023update_br_prob_note (basic_block bb)
f884e43f 2024{
2025 rtx note;
6d7dc5b9 2026 if (!JUMP_P (BB_END (bb)))
f884e43f 2027 return;
5496dbfc 2028 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
f884e43f 2029 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2030 return;
2031 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2032}
5d65a39c 2033
2034/* Get the last insn associated with block BB (that includes barriers and
2035 tablejumps after BB). */
2036rtx
2037get_last_bb_insn (basic_block bb)
2038{
2039 rtx tmp;
2040 rtx end = BB_END (bb);
2041
2042 /* Include any jump table following the basic block. */
2043 if (tablejump_p (end, NULL, &tmp))
2044 end = tmp;
2045
2046 /* Include any barriers that may follow the basic block. */
c4d13c5c 2047 tmp = next_nonnote_insn_bb (end);
5d65a39c 2048 while (tmp && BARRIER_P (tmp))
2049 {
2050 end = tmp;
c4d13c5c 2051 tmp = next_nonnote_insn_bb (end);
5d65a39c 2052 }
2053
2054 return end;
2055}
f884e43f 2056\f
028f8cc7 2057/* Verify the CFG and RTL consistency common for both underlying RTL and
2058 cfglayout RTL.
b36d64df 2059
2060 Currently it does following checks:
2061
b36d64df 2062 - overlapping of basic blocks
fcde5c9e 2063 - insns with wrong BLOCK_FOR_INSN pointers
b36d64df 2064 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
dd5b4b36 2065 - tails of basic blocks (ensure that boundary is necessary)
b36d64df 2066 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2067 and NOTE_INSN_BASIC_BLOCK
4f18499c 2068 - verify that no fall_thru edge crosses hot/cold partition boundaries
fcde5c9e 2069 - verify that there are no pending RTL branch predictions
b36d64df 2070
2071 In future it can be extended check a lot of other stuff as well
2072 (reachability of basic blocks, life information, etc. etc.). */
5f5d4cd1 2073
028f8cc7 2074static int
4c9e08a4 2075rtl_verify_flow_info_1 (void)
b36d64df 2076{
b36d64df 2077 rtx x;
028f8cc7 2078 int err = 0;
d207f783 2079 basic_block bb;
b36d64df 2080
fcde5c9e 2081 /* Check the general integrity of the basic blocks. */
4c26117a 2082 FOR_EACH_BB_REVERSE (bb)
b36d64df 2083 {
fcde5c9e 2084 rtx insn;
5cc577b6 2085
e0dde8f8 2086 if (!(bb->flags & BB_RTL))
2087 {
2088 error ("BB_RTL flag not set for block %d", bb->index);
2089 err = 1;
2090 }
2091
fcde5c9e 2092 FOR_BB_INSNS (bb, insn)
e734eb61 2093 if (BLOCK_FOR_INSN (insn) != bb)
fcde5c9e 2094 {
2095 error ("insn %d basic block pointer is %d, should be %d",
2096 INSN_UID (insn),
2097 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2098 bb->index);
2099 err = 1;
2100 }
39257eb5 2101
43e94e51 2102 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
39257eb5 2103 if (!BARRIER_P (insn)
2104 && BLOCK_FOR_INSN (insn) != NULL)
2105 {
2106 error ("insn %d in header of bb %d has non-NULL basic block",
2107 INSN_UID (insn), bb->index);
2108 err = 1;
2109 }
43e94e51 2110 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
39257eb5 2111 if (!BARRIER_P (insn)
2112 && BLOCK_FOR_INSN (insn) != NULL)
2113 {
2114 error ("insn %d in footer of bb %d has non-NULL basic block",
2115 INSN_UID (insn), bb->index);
2116 err = 1;
2117 }
b36d64df 2118 }
2119
2120 /* Now check the basic blocks (boundaries etc.) */
4c26117a 2121 FOR_EACH_BB_REVERSE (bb)
b36d64df 2122 {
103dfb10 2123 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2124 int n_eh = 0, n_abnormal = 0;
e6261ab0 2125 edge e, fallthru = NULL;
a2e42321 2126 rtx note;
cd665a06 2127 edge_iterator ei;
b36d64df 2128
e6d1500f 2129 if (JUMP_P (BB_END (bb))
5496dbfc 2130 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
cd665a06 2131 && EDGE_COUNT (bb->succs) >= 2
5496dbfc 2132 && any_condjump_p (BB_END (bb)))
a2e42321 2133 {
d8c70625 2134 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2135 && profile_status != PROFILE_ABSENT)
a2e42321 2136 {
d9f38790 2137 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
a2e42321 2138 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2139 err = 1;
2140 }
2141 }
cd665a06 2142 FOR_EACH_EDGE (e, ei, bb->succs)
b36d64df 2143 {
f59cbcbf 2144 bool is_crossing;
2145
b36d64df 2146 if (e->flags & EDGE_FALLTHRU)
f59cbcbf 2147 n_fallthru++, fallthru = e;
2148
2149 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2150 && e->src != ENTRY_BLOCK_PTR
2151 && e->dest != EXIT_BLOCK_PTR);
2152 if (e->flags & EDGE_CROSSING)
4f18499c 2153 {
f59cbcbf 2154 if (!is_crossing)
2155 {
2156 error ("EDGE_CROSSING incorrectly set across same section");
2157 err = 1;
2158 }
2159 if (e->flags & EDGE_FALLTHRU)
2160 {
103dfb10 2161 error ("fallthru edge crosses section boundary in bb %i",
4f18499c 2162 e->src->index);
2163 err = 1;
2164 }
f59cbcbf 2165 if (e->flags & EDGE_EH)
2166 {
103dfb10 2167 error ("EH edge crosses section boundary in bb %i",
f59cbcbf 2168 e->src->index);
2169 err = 1;
2170 }
2171 }
2172 else if (is_crossing)
2173 {
2174 error ("EDGE_CROSSING missing across section boundary");
2175 err = 1;
4f18499c 2176 }
fb20d6fa 2177
7f42fe24 2178 if ((e->flags & ~(EDGE_DFS_BACK
2179 | EDGE_CAN_FALLTHRU
2180 | EDGE_IRREDUCIBLE_LOOP
f50b5bd0 2181 | EDGE_LOOP_EXIT
acc48fa6 2182 | EDGE_CROSSING
2183 | EDGE_PRESERVE)) == 0)
fb20d6fa 2184 n_branch++;
2185
2186 if (e->flags & EDGE_ABNORMAL_CALL)
103dfb10 2187 n_abnormal_call++;
2188
2189 if (e->flags & EDGE_SIBCALL)
2190 n_sibcall++;
fb20d6fa 2191
2192 if (e->flags & EDGE_EH)
2193 n_eh++;
103dfb10 2194
2195 if (e->flags & EDGE_ABNORMAL)
fb20d6fa 2196 n_abnormal++;
b36d64df 2197 }
5cc577b6 2198
e38def9c 2199 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
fb20d6fa 2200 {
103dfb10 2201 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
fb20d6fa 2202 err = 1;
2203 }
e38def9c 2204 if (n_eh > 1)
2205 {
103dfb10 2206 error ("too many exception handling edges in bb %i", bb->index);
e38def9c 2207 err = 1;
2208 }
fb20d6fa 2209 if (n_branch
6d7dc5b9 2210 && (!JUMP_P (BB_END (bb))
5496dbfc 2211 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2212 || any_condjump_p (BB_END (bb))))))
fb20d6fa 2213 {
0a81f5a0 2214 error ("too many outgoing branch edges from bb %i", bb->index);
fb20d6fa 2215 err = 1;
2216 }
5496dbfc 2217 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
fb20d6fa 2218 {
103dfb10 2219 error ("fallthru edge after unconditional jump in bb %i", bb->index);
fb20d6fa 2220 err = 1;
2221 }
5496dbfc 2222 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
fb20d6fa 2223 {
103dfb10 2224 error ("wrong number of branch edges after unconditional jump"
2225 " in bb %i", bb->index);
fb20d6fa 2226 err = 1;
2227 }
5496dbfc 2228 if (n_branch != 1 && any_condjump_p (BB_END (bb))
8345ac1a 2229 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
fb20d6fa 2230 {
103dfb10 2231 error ("wrong amount of branch edges after conditional jump"
2232 " in bb %i", bb->index);
2233 err = 1;
2234 }
2235 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2236 {
2237 error ("abnormal call edges for non-call insn in bb %i", bb->index);
fb20d6fa 2238 err = 1;
2239 }
103dfb10 2240 if (n_sibcall && !CALL_P (BB_END (bb)))
fb20d6fa 2241 {
103dfb10 2242 error ("sibcall edges for non-call insn in bb %i", bb->index);
fb20d6fa 2243 err = 1;
2244 }
103dfb10 2245 if (n_abnormal > n_eh
2246 && !(CALL_P (BB_END (bb))
2247 && n_abnormal == n_abnormal_call + n_sibcall)
6d7dc5b9 2248 && (!JUMP_P (BB_END (bb))
5496dbfc 2249 || any_condjump_p (BB_END (bb))
2250 || any_uncondjump_p (BB_END (bb))))
fb20d6fa 2251 {
0a81f5a0 2252 error ("abnormal edges for no purpose in bb %i", bb->index);
fb20d6fa 2253 err = 1;
2254 }
db34a109 2255
5496dbfc 2256 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
fdd27295 2257 /* We may have a barrier inside a basic block before dead code
f5310810 2258 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2259 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
5cc577b6 2260 {
2261 debug_rtx (x);
2262 if (! BLOCK_FOR_INSN (x))
2263 error
2264 ("insn %d inside basic block %d but block_for_insn is NULL",
b3d6de89 2265 INSN_UID (x), bb->index);
5cc577b6 2266 else
2267 error
2268 ("insn %d inside basic block %d but block_for_insn is %i",
b3d6de89 2269 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
5cc577b6 2270
2271 err = 1;
2272 }
b36d64df 2273
2274 /* OK pointers are correct. Now check the header of basic
a0c938f0 2275 block. It ought to contain optional CODE_LABEL followed
b36d64df 2276 by NOTE_BASIC_BLOCK. */
5496dbfc 2277 x = BB_HEAD (bb);
6d7dc5b9 2278 if (LABEL_P (x))
b36d64df 2279 {
5496dbfc 2280 if (BB_END (bb) == x)
b36d64df 2281 {
2282 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
b3d6de89 2283 bb->index);
b36d64df 2284 err = 1;
2285 }
5cc577b6 2286
b36d64df 2287 x = NEXT_INSN (x);
2288 }
5cc577b6 2289
b36d64df 2290 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2291 {
2292 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
b3d6de89 2293 bb->index);
b36d64df 2294 err = 1;
2295 }
2296
5496dbfc 2297 if (BB_END (bb) == x)
f3e2a363 2298 /* Do checks for empty blocks here. */
5cc577b6 2299 ;
b36d64df 2300 else
5cc577b6 2301 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2302 {
2303 if (NOTE_INSN_BASIC_BLOCK_P (x))
2304 {
2305 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
b3d6de89 2306 INSN_UID (x), bb->index);
5cc577b6 2307 err = 1;
2308 }
2309
5496dbfc 2310 if (x == BB_END (bb))
5cc577b6 2311 break;
b36d64df 2312
aaf52ffb 2313 if (control_flow_insn_p (x))
5cc577b6 2314 {
b3d6de89 2315 error ("in basic block %d:", bb->index);
5cc577b6 2316 fatal_insn ("flow control insn inside a basic block", x);
2317 }
2318 }
b36d64df 2319 }
2320
028f8cc7 2321 /* Clean up. */
028f8cc7 2322 return err;
2323}
5cc577b6 2324
028f8cc7 2325/* Verify the CFG and RTL consistency common for both underlying RTL and
2326 cfglayout RTL.
5cc577b6 2327
028f8cc7 2328 Currently it does following checks:
2329 - all checks of rtl_verify_flow_info_1
fcde5c9e 2330 - test head/end pointers
028f8cc7 2331 - check that all insns are in the basic blocks
2332 (except the switch handling code, barriers and notes)
2333 - check that all returns are followed by barriers
2334 - check that all fallthru edge points to the adjacent blocks. */
fcde5c9e 2335
028f8cc7 2336static int
4c9e08a4 2337rtl_verify_flow_info (void)
028f8cc7 2338{
2339 basic_block bb;
2340 int err = rtl_verify_flow_info_1 ();
2341 rtx x;
fcde5c9e 2342 rtx last_head = get_last_insn ();
2343 basic_block *bb_info;
028f8cc7 2344 int num_bb_notes;
2345 const rtx rtx_first = get_insns ();
2346 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
fcde5c9e 2347 const int max_uid = get_max_uid ();
2348
2349 bb_info = XCNEWVEC (basic_block, max_uid);
b36d64df 2350
028f8cc7 2351 FOR_EACH_BB_REVERSE (bb)
2352 {
2353 edge e;
fcde5c9e 2354 rtx head = BB_HEAD (bb);
2355 rtx end = BB_END (bb);
cd665a06 2356
fcde5c9e 2357 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
39257eb5 2358 {
2359 /* Verify the end of the basic block is in the INSN chain. */
2360 if (x == end)
2361 break;
2362
2363 /* And that the code outside of basic blocks has NULL bb field. */
2364 if (!BARRIER_P (x)
2365 && BLOCK_FOR_INSN (x) != NULL)
2366 {
2367 error ("insn %d outside of basic blocks has non-NULL bb field",
2368 INSN_UID (x));
2369 err = 1;
2370 }
2371 }
fcde5c9e 2372
2373 if (!x)
2374 {
2375 error ("end insn %d for block %d not found in the insn stream",
2376 INSN_UID (end), bb->index);
2377 err = 1;
2378 }
2379
2380 /* Work backwards from the end to the head of the basic block
2381 to verify the head is in the RTL chain. */
2382 for (; x != NULL_RTX; x = PREV_INSN (x))
d5043f32 2383 {
fcde5c9e 2384 /* While walking over the insn chain, verify insns appear
2385 in only one basic block. */
2386 if (bb_info[INSN_UID (x)] != NULL)
2387 {
2388 error ("insn %d is in multiple basic blocks (%d and %d)",
2389 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2390 err = 1;
2391 }
2392
2393 bb_info[INSN_UID (x)] = bb;
2394
2395 if (x == head)
2396 break;
2397 }
2398 if (!x)
2399 {
2400 error ("head insn %d for block %d not found in the insn stream",
2401 INSN_UID (head), bb->index);
d5043f32 2402 err = 1;
2403 }
2404
39257eb5 2405 last_head = PREV_INSN (x);
fcde5c9e 2406
7f58c05e 2407 e = find_fallthru_edge (bb->succs);
028f8cc7 2408 if (!e)
2409 {
2410 rtx insn;
2411
2412 /* Ensure existence of barrier in BB with no fallthru edges. */
d2b48f0c 2413 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2414 {
2415 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
028f8cc7 2416 {
2417 error ("missing barrier after block %i", bb->index);
2418 err = 1;
2419 break;
2420 }
d2b48f0c 2421 if (BARRIER_P (insn))
2422 break;
2423 }
028f8cc7 2424 }
2425 else if (e->src != ENTRY_BLOCK_PTR
2426 && e->dest != EXIT_BLOCK_PTR)
a0c938f0 2427 {
028f8cc7 2428 rtx insn;
2429
2430 if (e->src->next_bb != e->dest)
2431 {
2432 error
2433 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2434 e->src->index, e->dest->index);
2435 err = 1;
2436 }
2437 else
5496dbfc 2438 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
028f8cc7 2439 insn = NEXT_INSN (insn))
ac7d3688 2440 if (BARRIER_P (insn) || INSN_P (insn))
028f8cc7 2441 {
2442 error ("verify_flow_info: Incorrect fallthru %i->%i",
2443 e->src->index, e->dest->index);
2444 fatal_insn ("wrong insn in the fallthru edge", insn);
2445 err = 1;
2446 }
a0c938f0 2447 }
028f8cc7 2448 }
b36d64df 2449
39257eb5 2450 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2451 {
2452 /* Check that the code before the first basic block has NULL
2453 bb field. */
2454 if (!BARRIER_P (x)
2455 && BLOCK_FOR_INSN (x) != NULL)
2456 {
2457 error ("insn %d outside of basic blocks has non-NULL bb field",
2458 INSN_UID (x));
2459 err = 1;
2460 }
2461 }
fcde5c9e 2462 free (bb_info);
2463
b36d64df 2464 num_bb_notes = 0;
4c26117a 2465 last_bb_seen = ENTRY_BLOCK_PTR;
2466
5cc577b6 2467 for (x = rtx_first; x; x = NEXT_INSN (x))
b36d64df 2468 {
2469 if (NOTE_INSN_BASIC_BLOCK_P (x))
2470 {
3c0a32c9 2471 bb = NOTE_BASIC_BLOCK (x);
5cc577b6 2472
b36d64df 2473 num_bb_notes++;
4c26117a 2474 if (bb != last_bb_seen->next_bb)
028f8cc7 2475 internal_error ("basic blocks not laid down consecutively");
b36d64df 2476
028f8cc7 2477 curr_bb = last_bb_seen = bb;
b36d64df 2478 }
2479
028f8cc7 2480 if (!curr_bb)
b36d64df 2481 {
2482 switch (GET_CODE (x))
2483 {
2484 case BARRIER:
2485 case NOTE:
2486 break;
2487
2488 case CODE_LABEL:
af6ed417 2489 /* An addr_vec is placed outside any basic block. */
b36d64df 2490 if (NEXT_INSN (x)
971ba038 2491 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
5cc577b6 2492 x = NEXT_INSN (x);
b36d64df 2493
2494 /* But in any case, non-deletable labels can appear anywhere. */
2495 break;
2496
2497 default:
cb8bacb6 2498 fatal_insn ("insn outside basic block", x);
b36d64df 2499 }
2500 }
2501
345f9e8c 2502 if (JUMP_P (x)
b36d64df 2503 && returnjump_p (x) && ! condjump_p (x)
8fcc0f14 2504 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
cb8bacb6 2505 fatal_insn ("return not followed by barrier", x);
5496dbfc 2506 if (curr_bb && x == BB_END (curr_bb))
028f8cc7 2507 curr_bb = NULL;
b36d64df 2508 }
2509
4d2e5d52 2510 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
b36d64df 2511 internal_error
b3d6de89 2512 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2513 num_bb_notes, n_basic_blocks);
b36d64df 2514
028f8cc7 2515 return err;
b36d64df 2516}
2517\f
4a82352a 2518/* Assume that the preceding pass has possibly eliminated jump instructions
b36d64df 2519 or converted the unconditional jumps. Eliminate the edges from CFG.
2520 Return true if any edges are eliminated. */
2521
2522bool
4c9e08a4 2523purge_dead_edges (basic_block bb)
b36d64df 2524{
cd665a06 2525 edge e;
5496dbfc 2526 rtx insn = BB_END (bb), note;
b36d64df 2527 bool purged = false;
cd665a06 2528 bool found;
2529 edge_iterator ei;
b36d64df 2530
9845d120 2531 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2532 do
2533 insn = PREV_INSN (insn);
2534 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2535
11808932 2536 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
6d7dc5b9 2537 if (NONJUMP_INSN_P (insn)
11808932 2538 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2539 {
2540 rtx eqnote;
2541
2542 if (! may_trap_p (PATTERN (insn))
2543 || ((eqnote = find_reg_equal_equiv_note (insn))
2544 && ! may_trap_p (XEXP (eqnote, 0))))
2545 remove_note (insn, note);
2546 }
2547
f178f44b 2548 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
cd665a06 2549 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
f178f44b 2550 {
e38def9c 2551 bool remove = false;
2552
04166d08 2553 /* There are three types of edges we need to handle correctly here: EH
2554 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2555 latter can appear when nonlocal gotos are used. */
e38def9c 2556 if (e->flags & EDGE_ABNORMAL_CALL)
f178f44b 2557 {
e38def9c 2558 if (!CALL_P (insn))
2559 remove = true;
2560 else if (can_nonlocal_goto (insn))
2561 ;
2562 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2563 ;
4c0315d0 2564 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2565 ;
e38def9c 2566 else
2567 remove = true;
f178f44b 2568 }
e38def9c 2569 else if (e->flags & EDGE_EH)
2570 remove = !can_throw_internal (insn);
2571
2572 if (remove)
f178f44b 2573 {
e38def9c 2574 remove_edge (e);
2575 df_set_bb_dirty (bb);
2576 purged = true;
f178f44b 2577 }
2578 else
e38def9c 2579 ei_next (&ei);
f178f44b 2580 }
5cc577b6 2581
6d7dc5b9 2582 if (JUMP_P (insn))
b36d64df 2583 {
2584 rtx note;
2585 edge b,f;
cd665a06 2586 edge_iterator ei;
5cc577b6 2587
b36d64df 2588 /* We do care only about conditional jumps and simplejumps. */
2589 if (!any_condjump_p (insn)
2590 && !returnjump_p (insn)
2591 && !simplejump_p (insn))
461eeb14 2592 return purged;
5cc577b6 2593
a2e42321 2594 /* Branch probability/prediction notes are defined only for
2595 condjumps. We've possibly turned condjump into simplejump. */
2596 if (simplejump_p (insn))
2597 {
2598 note = find_reg_note (insn, REG_BR_PROB, NULL);
2599 if (note)
2600 remove_note (insn, note);
2601 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2602 remove_note (insn, note);
2603 }
2604
cd665a06 2605 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
b36d64df 2606 {
4270b4b9 2607 /* Avoid abnormal flags to leak from computed jumps turned
2608 into simplejumps. */
db34a109 2609
a0fb7382 2610 e->flags &= ~EDGE_ABNORMAL;
4270b4b9 2611
14abf923 2612 /* See if this edge is one we should keep. */
2613 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2614 /* A conditional jump can fall through into the next
2615 block, so we should keep the edge. */
cd665a06 2616 {
2617 ei_next (&ei);
2618 continue;
2619 }
5cc577b6 2620 else if (e->dest != EXIT_BLOCK_PTR
5496dbfc 2621 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
14abf923 2622 /* If the destination block is the target of the jump,
2623 keep the edge. */
cd665a06 2624 {
2625 ei_next (&ei);
2626 continue;
2627 }
14abf923 2628 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2629 /* If the destination block is the exit block, and this
2630 instruction is a return, then keep the edge. */
cd665a06 2631 {
2632 ei_next (&ei);
2633 continue;
2634 }
14abf923 2635 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2636 /* Keep the edges that correspond to exceptions thrown by
1b64239d 2637 this instruction and rematerialize the EDGE_ABNORMAL
2638 flag we just cleared above. */
2639 {
2640 e->flags |= EDGE_ABNORMAL;
cd665a06 2641 ei_next (&ei);
1b64239d 2642 continue;
2643 }
5cc577b6 2644
14abf923 2645 /* We do not need this edge. */
3072d30e 2646 df_set_bb_dirty (bb);
b36d64df 2647 purged = true;
2648 remove_edge (e);
2649 }
5cc577b6 2650
cd665a06 2651 if (EDGE_COUNT (bb->succs) == 0 || !purged)
461eeb14 2652 return purged;
5cc577b6 2653
450d042a 2654 if (dump_file)
2655 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5cc577b6 2656
b36d64df 2657 if (!optimize)
2658 return purged;
2659
2660 /* Redistribute probabilities. */
ea091dfd 2661 if (single_succ_p (bb))
b36d64df 2662 {
ea091dfd 2663 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2664 single_succ_edge (bb)->count = bb->count;
db34a109 2665 }
b36d64df 2666 else
2667 {
2668 note = find_reg_note (insn, REG_BR_PROB, NULL);
2669 if (!note)
2670 return purged;
5cc577b6 2671
b36d64df 2672 b = BRANCH_EDGE (bb);
2673 f = FALLTHRU_EDGE (bb);
2674 b->probability = INTVAL (XEXP (note, 0));
2675 f->probability = REG_BR_PROB_BASE - b->probability;
2676 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2677 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2678 }
5cc577b6 2679
b36d64df 2680 return purged;
2681 }
6d7dc5b9 2682 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
bf4311e9 2683 {
2684 /* First, there should not be any EH or ABCALL edges resulting
2685 from non-local gotos and the like. If there were, we shouldn't
2686 have created the sibcall in the first place. Second, there
2687 should of course never have been a fallthru edge. */
ea091dfd 2688 gcc_assert (single_succ_p (bb));
2689 gcc_assert (single_succ_edge (bb)->flags
2690 == (EDGE_SIBCALL | EDGE_ABNORMAL));
bf4311e9 2691
2692 return 0;
2693 }
b36d64df 2694
b36d64df 2695 /* If we don't see a jump insn, we don't know exactly why the block would
2696 have been broken at this point. Look for a simple, non-fallthru edge,
2697 as these are only created by conditional branches. If we find such an
2698 edge we know that there used to be a jump here and can then safely
2699 remove all non-fallthru edges. */
cd665a06 2700 found = false;
2701 FOR_EACH_EDGE (e, ei, bb->succs)
2702 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2703 {
2704 found = true;
2705 break;
2706 }
5cc577b6 2707
cd665a06 2708 if (!found)
b36d64df 2709 return purged;
5cc577b6 2710
6e81a369 2711 /* Remove all but the fake and fallthru edges. The fake edge may be
2712 the only successor for this block in the case of noreturn
2713 calls. */
cd665a06 2714 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
b36d64df 2715 {
6e81a369 2716 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
461eeb14 2717 {
3072d30e 2718 df_set_bb_dirty (bb);
461eeb14 2719 remove_edge (e);
2720 purged = true;
2721 }
cd665a06 2722 else
2723 ei_next (&ei);
b36d64df 2724 }
5cc577b6 2725
ea091dfd 2726 gcc_assert (single_succ_p (bb));
5cc577b6 2727
ea091dfd 2728 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2729 single_succ_edge (bb)->count = bb->count;
b36d64df 2730
450d042a 2731 if (dump_file)
2732 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
b3d6de89 2733 bb->index);
b36d64df 2734 return purged;
2735}
2736
5cc577b6 2737/* Search all basic blocks for potentially dead edges and purge them. Return
2738 true if some edge has been eliminated. */
b36d64df 2739
2740bool
94ee50e8 2741purge_all_dead_edges (void)
b36d64df 2742{
4c26117a 2743 int purged = false;
4c26117a 2744 basic_block bb;
0922c912 2745
4c26117a 2746 FOR_EACH_BB (bb)
0922c912 2747 {
4c26117a 2748 bool purged_here = purge_dead_edges (bb);
5cc577b6 2749
0922c912 2750 purged |= purged_here;
0922c912 2751 }
5cc577b6 2752
b36d64df 2753 return purged;
2754}
1026363d 2755
54f21e20 2756/* This is used by a few passes that emit some instructions after abnormal
2757 calls, moving the basic block's end, while they in fact do want to emit
2758 them on the fallthru edge. Look for abnormal call edges, find backward
2759 the call in the block and insert the instructions on the edge instead.
2760
2761 Similarly, handle instructions throwing exceptions internally.
2762
2763 Return true when instructions have been found and inserted on edges. */
2764
2765bool
2766fixup_abnormal_edges (void)
2767{
2768 bool inserted = false;
2769 basic_block bb;
2770
2771 FOR_EACH_BB (bb)
2772 {
2773 edge e;
2774 edge_iterator ei;
2775
2776 /* Look for cases we are interested in - calls or instructions causing
2777 exceptions. */
2778 FOR_EACH_EDGE (e, ei, bb->succs)
2779 if ((e->flags & EDGE_ABNORMAL_CALL)
2780 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2781 == (EDGE_ABNORMAL | EDGE_EH)))
2782 break;
2783
2784 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2785 {
2786 rtx insn;
2787
2788 /* Get past the new insns generated. Allow notes, as the insns
2789 may be already deleted. */
2790 insn = BB_END (bb);
2791 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2792 && !can_throw_internal (insn)
2793 && insn != BB_HEAD (bb))
2794 insn = PREV_INSN (insn);
2795
2796 if (CALL_P (insn) || can_throw_internal (insn))
2797 {
2798 rtx stop, next;
2799
2800 e = find_fallthru_edge (bb->succs);
2801
2802 stop = NEXT_INSN (BB_END (bb));
2803 BB_END (bb) = insn;
2804
2805 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2806 {
2807 next = NEXT_INSN (insn);
2808 if (INSN_P (insn))
2809 {
2810 delete_insn (insn);
2811
2812 /* Sometimes there's still the return value USE.
2813 If it's placed after a trapping call (i.e. that
2814 call is the last insn anyway), we have no fallthru
2815 edge. Simply delete this use and don't try to insert
2816 on the non-existent edge. */
2817 if (GET_CODE (PATTERN (insn)) != USE)
2818 {
2819 /* We're not deleting it, we're moving it. */
2820 INSN_DELETED_P (insn) = 0;
2821 PREV_INSN (insn) = NULL_RTX;
2822 NEXT_INSN (insn) = NULL_RTX;
2823
2824 insert_insn_on_edge (insn, e);
2825 inserted = true;
2826 }
2827 }
2828 else if (!BARRIER_P (insn))
2829 set_block_for_insn (insn, NULL);
2830 }
2831 }
2832
2833 /* It may be that we don't find any trapping insn. In this
2834 case we discovered quite late that the insn that had been
2835 marked as can_throw_internal in fact couldn't trap at all.
2836 So we should in fact delete the EH edges out of the block. */
2837 else
2838 purge_dead_edges (bb);
2839 }
2840 }
2841
2842 return inserted;
2843}
23a070f3 2844\f
2845/* Cut the insns from FIRST to LAST out of the insns stream. */
2846
2847rtx
2848unlink_insn_chain (rtx first, rtx last)
2849{
2850 rtx prevfirst = PREV_INSN (first);
2851 rtx nextlast = NEXT_INSN (last);
2852
2853 PREV_INSN (first) = NULL;
2854 NEXT_INSN (last) = NULL;
2855 if (prevfirst)
2856 NEXT_INSN (prevfirst) = nextlast;
2857 if (nextlast)
2858 PREV_INSN (nextlast) = prevfirst;
2859 else
2860 set_last_insn (prevfirst);
2861 if (!prevfirst)
2862 set_first_insn (nextlast);
2863 return first;
2864}
2865\f
2866/* Skip over inter-block insns occurring after BB which are typically
2867 associated with BB (e.g., barriers). If there are any such insns,
2868 we return the last one. Otherwise, we return the end of BB. */
2869
2870static rtx
2871skip_insns_after_block (basic_block bb)
2872{
2873 rtx insn, last_insn, next_head, prev;
2874
2875 next_head = NULL_RTX;
2876 if (bb->next_bb != EXIT_BLOCK_PTR)
2877 next_head = BB_HEAD (bb->next_bb);
2878
2879 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
2880 {
2881 if (insn == next_head)
2882 break;
2883
2884 switch (GET_CODE (insn))
2885 {
2886 case BARRIER:
2887 last_insn = insn;
2888 continue;
2889
2890 case NOTE:
2891 switch (NOTE_KIND (insn))
2892 {
2893 case NOTE_INSN_BLOCK_END:
2894 gcc_unreachable ();
2895 continue;
2896 default:
2897 continue;
2898 break;
2899 }
2900 break;
2901
2902 case CODE_LABEL:
2903 if (NEXT_INSN (insn)
2904 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
2905 {
2906 insn = NEXT_INSN (insn);
2907 last_insn = insn;
2908 continue;
2909 }
2910 break;
2911
2912 default:
2913 break;
2914 }
2915
2916 break;
2917 }
2918
2919 /* It is possible to hit contradictory sequence. For instance:
2920
2921 jump_insn
2922 NOTE_INSN_BLOCK_BEG
2923 barrier
2924
2925 Where barrier belongs to jump_insn, but the note does not. This can be
2926 created by removing the basic block originally following
2927 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
2928
2929 for (insn = last_insn; insn != BB_END (bb); insn = prev)
2930 {
2931 prev = PREV_INSN (insn);
2932 if (NOTE_P (insn))
2933 switch (NOTE_KIND (insn))
2934 {
2935 case NOTE_INSN_BLOCK_END:
2936 gcc_unreachable ();
2937 break;
2938 case NOTE_INSN_DELETED:
2939 case NOTE_INSN_DELETED_LABEL:
2940 case NOTE_INSN_DELETED_DEBUG_LABEL:
2941 continue;
2942 default:
2943 reorder_insns (insn, insn, last_insn);
2944 }
2945 }
2946
2947 return last_insn;
2948}
2949
2950/* Locate or create a label for a given basic block. */
2951
2952static rtx
2953label_for_bb (basic_block bb)
2954{
2955 rtx label = BB_HEAD (bb);
2956
2957 if (!LABEL_P (label))
2958 {
2959 if (dump_file)
2960 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2961
2962 label = block_label (bb);
2963 }
2964
2965 return label;
2966}
2967
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Insns that belong
   to no block (notes and barriers between blocks) are stashed in the
   per-block header/footer fields, or in cfg_layout_function_header /
   cfg_layout_function_footer for insns before the first and after the
   last block.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip leading notes up to the first NOTE_INSN_BASIC_BLOCK.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything before the first block's note becomes the function
     header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's end and BB_HEAD form BB's
	 header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers/notes after BB_END form BB's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block becomes the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3011\f
/* Pass body: switch the current function into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  /* No additional cleanup flags are requested when entering.  */
  cfg_layout_initialize (0);
  return 0;
}
3018
3019static unsigned int
3020outof_cfg_layout_mode (void)
3021{
3022 basic_block bb;
3023
3024 FOR_EACH_BB (bb)
3025 if (bb->next_bb != EXIT_BLOCK_PTR)
3026 bb->aux = bb->next_bb;
3027
3028 cfg_layout_finalize ();
3029
3030 return 0;
3031}
3032
/* Pass descriptor for entering cfglayout mode (see into_cfg_layout_mode).
   Provides PROP_cfglayout; always runs (NULL gate).  */

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
3052
/* Pass descriptor for leaving cfglayout mode (see outof_cfg_layout_mode).
   Destroys PROP_cfglayout; always runs (NULL gate).  */

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
3072\f
3073
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: walk the aux chain starting after the
     entry block and rewrite prev_bb/next_bb links to match it.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3146\f
3147
/* Given a reorder chain (linked through bb->aux), rearrange the insn
   chain to match, then insert or invert jumps wherever a block's new
   successor is no longer the textually-next block.  Leaves cfglayout
   mode as a side effect (via relink_block_chain).  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* Re-attach the saved function header, if any, and advance INSN to
     its last insn.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      /* Swap the roles of the two edges.  */
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-thru across
	     section boundaries).  */
	  BB_COPY_PARTITION (src_bb, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm_common.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* Is the locus already present on a real insn near the end
		 of the source block?  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Or already present at the start of the destination?  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Split the edge and pin the locus on a nop in the new
		 block if needed.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
3451\f
3452/* Perform sanity checks on the insn chain.
3453 1. Check that next/prev pointers are consistent in both the forward and
3454 reverse direction.
3455 2. Count insns in chain, going both directions, and check if equal.
3456 3. Check that get_last_insn () returns the actual end of chain. */
3457
3458DEBUG_FUNCTION void
3459verify_insn_chain (void)
3460{
3461 rtx x, prevx, nextx;
3462 int insn_cnt1, insn_cnt2;
3463
3464 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3465 x != 0;
3466 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3467 gcc_assert (PREV_INSN (x) == prevx);
3468
3469 gcc_assert (prevx == get_last_insn ());
3470
3471 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3472 x != 0;
3473 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3474 gcc_assert (NEXT_INSN (x) == nextx);
3475
3476 gcc_assert (insn_cnt1 == insn_cnt2);
3477}
3478\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  /* Nothing to do unless BB exists and is not already last in the
     aux chain (bb->aux == NULL means last).  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...walk to the current tail...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ...and re-append BB as the new last block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
3523
3524/* In case there are more than one fallthru predecessors of exit, force that
3525 there is only one. */
3526
3527static void
3528force_one_exit_fallthru (void)
3529{
3530 edge e, predecessor = NULL;
3531 bool more = false;
3532 edge_iterator ei;
3533 basic_block forwarder, bb;
3534
3535 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3536 if (e->flags & EDGE_FALLTHRU)
3537 {
3538 if (predecessor == NULL)
3539 predecessor = e;
3540 else
3541 {
3542 more = true;
3543 break;
3544 }
3545 }
3546
3547 if (!more)
3548 return;
3549
3550 /* Exit has several fallthru predecessors. Create a forwarder block for
3551 them. */
3552 forwarder = split_edge (predecessor);
3553 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
3554 {
3555 if (e->src == forwarder
3556 || !(e->flags & EDGE_FALLTHRU))
3557 ei_next (&ei);
3558 else
3559 redirect_edge_and_branch_force (e, forwarder);
3560 }
3561
3562 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
3563 exit block. */
3564 FOR_EACH_BB (bb)
3565 {
3566 if (bb->aux == NULL && bb != forwarder)
3567 {
3568 bb->aux = forwarder;
3569 break;
3570 }
3571 }
3572}
3573\f
3574/* Return true in case it is possible to duplicate the basic block BB. */
3575
3576static bool
3577cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3578{
3579 /* Do not attempt to duplicate tablejumps, as we need to unshare
3580 the dispatch table. This is difficult to do, as the instructions
3581 computing jump destination may be hoisted outside the basic block. */
3582 if (tablejump_p (BB_END (bb), NULL, NULL))
3583 return false;
3584
3585 /* Do not duplicate blocks containing insns that can't be copied. */
3586 if (targetm.cannot_copy_insn_p)
3587 {
3588 rtx insn = BB_HEAD (bb);
3589 while (1)
3590 {
3591 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3592 return false;
3593 if (insn == BB_END (bb))
3594 break;
3595 insn = NEXT_INSN (insn);
3596 }
3597 }
3598
3599 return true;
3600}
3601
/* Duplicate the insns FROM..TO (inclusive), appending the copies at the
   end of the current insn chain.  Returns the first copied insn, or the
   insn after the (deleted) placeholder note if nothing was copied.
   Dispatch tables, their trailing barriers and label debug insns are
   deliberately not copied.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying following barrier as well if any
		 (and debug insns in between).  */
	      rtx next;

	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Preserve (simple_)return JUMP_LABELs, which are not real
	     labels and are not rebuilt by emit_copy_of_insn_after.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Drop the placeholder note and return the first real copy.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
3692
/* Create a duplicate of the basic block BB.  The copy (including BB's
   header and footer insn chains) is placed just before the exit block
   and inherits BB's partition.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Find the last insn of the header, duplicate the whole chain and
	 detach the copy into the new block's header.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Likewise for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
3729
3730\f
3731/* Main entry point to this module - initialize the datastructures for
3732 CFG layout changes. It keeps LOOPS up-to-date if not null.
3733
3734 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3735
3736void
3737cfg_layout_initialize (unsigned int flags)
3738{
3739 rtx x;
3740 basic_block bb;
3741
3742 initialize_original_copy_tables ();
3743
3744 cfg_layout_rtl_register_cfg_hooks ();
3745
3746 record_effective_endpoints ();
3747
3748 /* Make sure that the targets of non local gotos are marked. */
3749 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3750 {
3751 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3752 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3753 }
3754
3755 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3756}
3757
3758/* Splits superblocks. */
3759void
3760break_superblocks (void)
3761{
3762 sbitmap superblocks;
3763 bool need = false;
3764 basic_block bb;
3765
3766 superblocks = sbitmap_alloc (last_basic_block);
53c5d9d4 3767 bitmap_clear (superblocks);
23a070f3 3768
3769 FOR_EACH_BB (bb)
3770 if (bb->flags & BB_SUPERBLOCK)
3771 {
3772 bb->flags &= ~BB_SUPERBLOCK;
08b7917c 3773 bitmap_set_bit (superblocks, bb->index);
23a070f3 3774 need = true;
3775 }
3776
3777 if (need)
3778 {
3779 rebuild_jump_labels (get_insns ());
3780 find_many_sub_basic_blocks (superblocks);
3781 }
3782
3783 free (superblocks);
3784}
3785
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Make sure the exit block has at most one fallthru predecessor.  */
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  /* With an assembler epilogue, the fallthru-to-exit block must come
     last; only valid after reload (see fixup_fallthru_exit_predecessor).  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Relink the insn chain and patch up jumps; leaves cfglayout mode.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
3813
54f21e20 3814
1026363d 3815/* Same as split_block but update cfg_layout structures. */
5f5d4cd1 3816
3817static basic_block
4c9e08a4 3818cfg_layout_split_block (basic_block bb, void *insnp)
1026363d 3819{
f780cc25 3820 rtx insn = (rtx) insnp;
5f5d4cd1 3821 basic_block new_bb = rtl_split_block (bb, insn);
1026363d 3822
43e94e51 3823 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3824 BB_FOOTER (bb) = NULL;
1026363d 3825
5f5d4cd1 3826 return new_bb;
1026363d 3827}
3828
1026363d 3829/* Redirect Edge to DEST. */
4ee9c684 3830static edge
4c9e08a4 3831cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
1026363d 3832{
3833 basic_block src = e->src;
4ee9c684 3834 edge ret;
1026363d 3835
c60fa3a7 3836 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4ee9c684 3837 return NULL;
c60fa3a7 3838
e5562ab8 3839 if (e->dest == dest)
4ee9c684 3840 return e;
c60fa3a7 3841
e5562ab8 3842 if (e->src != ENTRY_BLOCK_PTR
4ee9c684 3843 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
adee86a0 3844 {
3072d30e 3845 df_set_bb_dirty (src);
4ee9c684 3846 return ret;
adee86a0 3847 }
c60fa3a7 3848
3849 if (e->src == ENTRY_BLOCK_PTR
3850 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
3851 {
450d042a 3852 if (dump_file)
3853 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
c60fa3a7 3854 e->src->index, dest->index);
3855
3072d30e 3856 df_set_bb_dirty (e->src);
c60fa3a7 3857 redirect_edge_succ (e, dest);
4ee9c684 3858 return e;
c60fa3a7 3859 }
3860
1026363d 3861 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
3862 in the case the basic block appears to be in sequence. Avoid this
3863 transformation. */
3864
1026363d 3865 if (e->flags & EDGE_FALLTHRU)
3866 {
3867 /* Redirect any branch edges unified with the fallthru one. */
6d7dc5b9 3868 if (JUMP_P (BB_END (src))
b9de5542 3869 && label_is_jump_target_p (BB_HEAD (e->dest),
3870 BB_END (src)))
1026363d 3871 {
cc636d56 3872 edge redirected;
a0c938f0 3873
450d042a 3874 if (dump_file)
3875 fprintf (dump_file, "Fallthru edge unified with branch "
b9de5542 3876 "%i->%i redirected to %i\n",
3877 e->src->index, e->dest->index, dest->index);
3878 e->flags &= ~EDGE_FALLTHRU;
cc636d56 3879 redirected = redirect_branch_edge (e, dest);
3880 gcc_assert (redirected);
d5321455 3881 redirected->flags |= EDGE_FALLTHRU;
3882 df_set_bb_dirty (redirected->src);
3883 return redirected;
1026363d 3884 }
3885 /* In case we are redirecting fallthru edge to the branch edge
a0c938f0 3886 of conditional jump, remove it. */
cd665a06 3887 if (EDGE_COUNT (src->succs) == 2)
1026363d 3888 {
9ea57afb 3889 /* Find the edge that is different from E. */
3890 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
cd665a06 3891
1026363d 3892 if (s->dest == dest
5496dbfc 3893 && any_condjump_p (BB_END (src))
3894 && onlyjump_p (BB_END (src)))
3895 delete_insn (BB_END (src));
1026363d 3896 }
450d042a 3897 if (dump_file)
6d31b223 3898 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
c60fa3a7 3899 e->src->index, e->dest->index, dest->index);
d5321455 3900 ret = redirect_edge_succ_nodup (e, dest);
1026363d 3901 }
3902 else
c60fa3a7 3903 ret = redirect_branch_edge (e, dest);
1026363d 3904
3905 /* We don't want simplejumps in the insn stream during cfglayout. */
cc636d56 3906 gcc_assert (!simplejump_p (BB_END (src)));
1026363d 3907
3072d30e 3908 df_set_bb_dirty (src);
1026363d 3909 return ret;
3910}
3911
3912/* Simple wrapper as we always can redirect fallthru edges. */
3913static basic_block
4c9e08a4 3914cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
1026363d 3915{
cc636d56 3916 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
3917
3918 gcc_assert (redirected);
1026363d 3919 return NULL;
3920}
3921
5f5d4cd1 3922/* Same as delete_basic_block but update cfg_layout structures. */
3923
1026363d 3924static void
4c9e08a4 3925cfg_layout_delete_block (basic_block bb)
1026363d 3926{
5496dbfc 3927 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
1026363d 3928
43e94e51 3929 if (BB_HEADER (bb))
1026363d 3930 {
5496dbfc 3931 next = BB_HEAD (bb);
1026363d 3932 if (prev)
43e94e51 3933 NEXT_INSN (prev) = BB_HEADER (bb);
1026363d 3934 else
43e94e51 3935 set_first_insn (BB_HEADER (bb));
3936 PREV_INSN (BB_HEADER (bb)) = prev;
3937 insn = BB_HEADER (bb);
1026363d 3938 while (NEXT_INSN (insn))
3939 insn = NEXT_INSN (insn);
3940 NEXT_INSN (insn) = next;
3941 PREV_INSN (next) = insn;
3942 }
5496dbfc 3943 next = NEXT_INSN (BB_END (bb));
43e94e51 3944 if (BB_FOOTER (bb))
1026363d 3945 {
43e94e51 3946 insn = BB_FOOTER (bb);
c60fa3a7 3947 while (insn)
3948 {
6d7dc5b9 3949 if (BARRIER_P (insn))
c60fa3a7 3950 {
3951 if (PREV_INSN (insn))
3952 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3953 else
43e94e51 3954 BB_FOOTER (bb) = NEXT_INSN (insn);
c60fa3a7 3955 if (NEXT_INSN (insn))
3956 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3957 }
6d7dc5b9 3958 if (LABEL_P (insn))
c60fa3a7 3959 break;
3960 insn = NEXT_INSN (insn);
3961 }
43e94e51 3962 if (BB_FOOTER (bb))
c60fa3a7 3963 {
5496dbfc 3964 insn = BB_END (bb);
43e94e51 3965 NEXT_INSN (insn) = BB_FOOTER (bb);
3966 PREV_INSN (BB_FOOTER (bb)) = insn;
c60fa3a7 3967 while (NEXT_INSN (insn))
3968 insn = NEXT_INSN (insn);
3969 NEXT_INSN (insn) = next;
3970 if (next)
3971 PREV_INSN (next) = insn;
3972 else
3973 set_last_insn (insn);
3974 }
1026363d 3975 }
3976 if (bb->next_bb != EXIT_BLOCK_PTR)
43e94e51 3977 to = &BB_HEADER (bb->next_bb);
1026363d 3978 else
3979 to = &cfg_layout_function_footer;
7a22afab 3980
1026363d 3981 rtl_delete_block (bb);
3982
3983 if (prev)
3984 prev = NEXT_INSN (prev);
4c9e08a4 3985 else
1026363d 3986 prev = get_insns ();
3987 if (next)
3988 next = PREV_INSN (next);
4c9e08a4 3989 else
1026363d 3990 next = get_last_insn ();
3991
3992 if (next && NEXT_INSN (next) != prev)
3993 {
3994 remaints = unlink_insn_chain (prev, next);
3995 insn = remaints;
3996 while (NEXT_INSN (insn))
3997 insn = NEXT_INSN (insn);
3998 NEXT_INSN (insn) = *to;
3999 if (*to)
4000 PREV_INSN (*to) = insn;
4001 *to = remaints;
4002 }
4003}
4004
139c3f48 4005/* Return true when blocks A and B can be safely merged. */
47aaf6e6 4006
c60fa3a7 4007static bool
47aaf6e6 4008cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
c60fa3a7 4009{
4f18499c 4010 /* If we are partitioning hot/cold basic blocks, we don't want to
4011 mess up unconditional or indirect jumps that cross between hot
7562ed74 4012 and cold sections.
4013
1118aef7 4014 Basic block partitioning may result in some jumps that appear to
a0c938f0 4015 be optimizable (or blocks that appear to be mergeable), but which really
4016 must be left untouched (they are required to make it safely across
4017 partition boundaries). See the comments at the top of
1118aef7 4018 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4019
1897b881 4020 if (BB_PARTITION (a) != BB_PARTITION (b))
7562ed74 4021 return false;
4f18499c 4022
79f958cb 4023 /* Protect the loop latches. */
4024 if (current_loops && b->loop_father->latch == b)
4025 return false;
4026
d945554e 4027 /* If we would end up moving B's instructions, make sure it doesn't fall
4028 through into the exit block, since we cannot recover from a fallthrough
4029 edge into the exit block occurring in the middle of a function. */
4030 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4031 {
4032 edge e = find_fallthru_edge (b->succs);
4033 if (e && e->dest == EXIT_BLOCK_PTR)
4034 return false;
4035 }
4036
c60fa3a7 4037 /* There must be exactly one edge in between the blocks. */
ea091dfd 4038 return (single_succ_p (a)
4039 && single_succ (a) == b
4040 && single_pred_p (b) == 1
cd665a06 4041 && a != b
c60fa3a7 4042 /* Must be simple edge. */
ea091dfd 4043 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
c60fa3a7 4044 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4aaec180 4045 /* If the jump insn has side effects, we can't kill the edge.
4046 When not optimizing, try_redirect_by_replacing_jump will
4047 not allow us to redirect an edge by replacing a table jump. */
6d7dc5b9 4048 && (!JUMP_P (BB_END (a))
4aaec180 4049 || ((!optimize || reload_completed)
5496dbfc 4050 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
c60fa3a7 4051}
4052
/* Merge block A and B.  The blocks must be mergeable
   (i.e. cfg_layout_can_merge_blocks_p must hold).  All of B's insns,
   header and footer are absorbed into A, and B is deleted.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* Remember B's forwarder status before we start tearing it apart;
     it is needed at the end for locus propagation.  */
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Possible line number notes should appear in between.  */
  if (BB_HEADER (b))
    {
      rtx first = BB_END (a), last;

      /* Copy B's header insns after A's end, then delete the originals.  */
      last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
      /* The above might add a BARRIER as BB_END, but as barriers
	 aren't valid parts of a bb, remove_insn doesn't update
	 BB_END if it is a barrier.  So adjust BB_END here.  */
      while (BB_END (a) != first && BARRIER_P (BB_END (a)))
	BB_END (a) = PREV_INSN (BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last, false);
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  /* Delete B's basic-block note; B no longer owns any insns.  */
  BB_HEAD (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block.
     Append B's footer chain onto the end of A's footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = BB_FOOTER (b);
	  PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4151
4152/* Split edge E. */
5f5d4cd1 4153
c60fa3a7 4154static basic_block
4155cfg_layout_split_edge (edge e)
4156{
c60fa3a7 4157 basic_block new_bb =
4158 create_basic_block (e->src != ENTRY_BLOCK_PTR
5496dbfc 4159 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
c60fa3a7 4160 NULL_RTX, e->src);
4161
e1b2b77c 4162 if (e->dest == EXIT_BLOCK_PTR)
4163 BB_COPY_PARTITION (new_bb, e->src);
4164 else
4165 BB_COPY_PARTITION (new_bb, e->dest);
3bdc4312 4166 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
c60fa3a7 4167 redirect_edge_and_branch_force (e, new_bb);
4168
4169 return new_bb;
4170}
4171
5f5d4cd1 4172/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4173
4174static void
4175rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4176{
4177}
4178
9631926a 4179/* Return true if BB contains only labels or non-executable
4180 instructions. */
4181
4182static bool
4183rtl_block_empty_p (basic_block bb)
4184{
4185 rtx insn;
4186
4187 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4188 return true;
4189
4190 FOR_BB_INSNS (bb, insn)
4191 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4192 return false;
4193
4194 return true;
4195}
4196
4197/* Split a basic block if it ends with a conditional branch and if
4198 the other part of the block is not empty. */
4199
4200static basic_block
4201rtl_split_block_before_cond_jump (basic_block bb)
4202{
4203 rtx insn;
4204 rtx split_point = NULL;
4205 rtx last = NULL;
4206 bool found_code = false;
4207
4208 FOR_BB_INSNS (bb, insn)
4209 {
4210 if (any_condjump_p (insn))
4211 split_point = last;
4212 else if (NONDEBUG_INSN_P (insn))
4213 found_code = true;
4214 last = insn;
4215 }
4216
4217 /* Did not find everything. */
4218 if (found_code && split_point)
4219 return split_block (bb, split_point)->dest;
4220 else
4221 return NULL;
4222}
4223
4ee9c684 4224/* Return 1 if BB ends with a call, possibly followed by some
4225 instructions that must stay with the call, 0 otherwise. */
4226
4227static bool
47aaf6e6 4228rtl_block_ends_with_call_p (basic_block bb)
4ee9c684 4229{
4230 rtx insn = BB_END (bb);
4231
6d7dc5b9 4232 while (!CALL_P (insn)
4ee9c684 4233 && insn != BB_HEAD (bb)
f69dfa18 4234 && (keep_with_call_p (insn)
9845d120 4235 || NOTE_P (insn)
4236 || DEBUG_INSN_P (insn)))
4ee9c684 4237 insn = PREV_INSN (insn);
6d7dc5b9 4238 return (CALL_P (insn));
4ee9c684 4239}
4240
4241/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4242
4243static bool
5493cb9a 4244rtl_block_ends_with_condjump_p (const_basic_block bb)
4ee9c684 4245{
4246 return any_condjump_p (BB_END (bb));
4247}
4248
4249/* Return true if we need to add fake edge to exit.
4250 Helper function for rtl_flow_call_edges_add. */
4251
4252static bool
5493cb9a 4253need_fake_edge_p (const_rtx insn)
4ee9c684 4254{
4255 if (!INSN_P (insn))
4256 return false;
4257
6d7dc5b9 4258 if ((CALL_P (insn)
4ee9c684 4259 && !SIBLING_CALL_P (insn)
4260 && !find_reg_note (insn, REG_NORETURN, NULL)
9c2a0c05 4261 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4ee9c684 4262 return true;
4263
4264 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4265 && MEM_VOLATILE_P (PATTERN (insn)))
4266 || (GET_CODE (PATTERN (insn)) == PARALLEL
4267 && asm_noperands (insn) != -1
4268 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4269 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4270}
4271
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Only the fixed entry/exit blocks exist: nothing to do.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Insert a (use (const_int 0)) dummy insn on the fallthru
		 edge so the fake edge added below stays distinct.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block's insns backwards; PREV_INSN is captured before
	 any splitting so the walk survives block renumbering.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4397
/* Emit the comparison COMP_RTX as a conditional jump at the end of
   COND_BB, branching to FIRST_HEAD.  SECOND_HEAD is the fall-through
   block and needs no handling here (the loop versioning code takes
   care of it); it is part of the signature only because the tree-level
   counterpart of this hook needs it, and both must share a type.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;


  label = block_label (first_head);
  /* Take the comparison mode from whichever operand has one.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump sequence in a detached insn chain.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond , in the new head.  */
  emit_insn_after(seq, BB_END(cond_bb));
}
4434
4435
4436/* Given a block B with unconditional branch at its end, get the
4437 store the return the branch edge and the fall-thru edge in
4438 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4439static void
4440rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4441 edge *fallthru_edge)
4442{
4443 edge e = EDGE_SUCC (b, 0);
4444
4445 if (e->flags & EDGE_FALLTHRU)
4446 {
4447 *fallthru_edge = e;
4448 *branch_edge = EDGE_SUCC (b, 1);
4449 }
4450 else
4451 {
4452 *branch_edge = e;
4453 *fallthru_edge = EDGE_SUCC (b, 1);
4454 }
4455}
4456
/* Allocate and attach the RTL-specific info structure for basic block
   BB.  BB must not already have one.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
}
4464
611d2ac1 4465/* Returns true if it is possible to remove edge E by redirecting
4466 it to the destination of the other edge from E->src. */
4467
4468static bool
5493cb9a 4469rtl_can_remove_branch_p (const_edge e)
611d2ac1 4470{
5493cb9a 4471 const_basic_block src = e->src;
4472 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4473 const_rtx insn = BB_END (src), set;
611d2ac1 4474
4475 /* The conditions are taken from try_redirect_by_replacing_jump. */
4476 if (target == EXIT_BLOCK_PTR)
4477 return false;
4478
4479 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4480 return false;
4481
4482 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
4483 || BB_PARTITION (src) != BB_PARTITION (target))
4484 return false;
4485
4486 if (!onlyjump_p (insn)
4487 || tablejump_p (insn, NULL, NULL))
4488 return false;
4489
4490 set = single_set (insn);
4491 if (!set || side_effects_p (set))
4492 return false;
4493
4494 return true;
4495}
4496
0a55d497 4497static basic_block
4498rtl_duplicate_bb (basic_block bb)
4499{
4500 bb = cfg_layout_duplicate_bb (bb);
4501 bb->aux = NULL;
4502 return bb;
4503}
4504
98193482 4505/* Do book-keeping of basic block BB for the profile consistency checker.
4506 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4507 then do post-pass accounting. Store the counting in RECORD. */
4508static void
4509rtl_account_profile_record (basic_block bb, int after_pass,
4510 struct profile_record *record)
4511{
4512 rtx insn;
4513 FOR_BB_INSNS (bb, insn)
4514 if (INSN_P (insn))
4515 {
4516 record->size[after_pass]
4517 += insn_rtx_cost (PATTERN (insn), false);
4518 if (profile_status == PROFILE_READ)
4519 record->time[after_pass]
4520 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4521 else if (profile_status == PROFILE_GUESSED)
4522 record->time[after_pass]
4523 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
4524 }
4525}
4526
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",				/* name */
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks,	/* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
4564
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",			/* name */
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
23a070f3 4606
4607#include "gt-cfgrtl.h"