/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file contains low-level functions to manipulate the CFG and analyze
   it that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block for an insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of the insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */
\f
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "cfglayout.h"
#include "expr.h"
#include "target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"

static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
static void commit_one_edge_insertion (edge);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
\f
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (rtx note)
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
          || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (rtx label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !in_expr_list_p (forced_labels, label));
}
102
103/* Delete INSN by patching it out. Return the next insn. */
104
105rtx
d329e058 106delete_insn (rtx insn)
ca6c03ca
JH
107{
108 rtx next = NEXT_INSN (insn);
109 rtx note;
110 bool really_delete = true;
111
4b4bf941 112 if (LABEL_P (insn))
ca6c03ca
JH
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced from variables, the constant pool, etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
ca6c03ca
JH
117 if (! can_delete_label_p (insn))
118 {
119 const char *name = LABEL_NAME (insn);
120
121 really_delete = false;
122 PUT_CODE (insn, NOTE);
123 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
6773e15f 124 NOTE_DELETED_LABEL_NAME (insn) = name;
ca6c03ca 125 }
5f0d2358 126
ca6c03ca
JH
127 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
128 }
129
130 if (really_delete)
131 {
cda94cbb 132 /* If this insn has already been deleted, something is very wrong. */
341c100f 133 gcc_assert (!INSN_DELETED_P (insn));
ca6c03ca
JH
134 remove_insn (insn);
135 INSN_DELETED_P (insn) = 1;
136 }
137
138 /* If deleting a jump, decrement the use count of the label. Deleting
139 the label itself should happen in the normal course of block merging. */
4b4bf941 140 if (JUMP_P (insn)
ca6c03ca 141 && JUMP_LABEL (insn)
4b4bf941 142 && LABEL_P (JUMP_LABEL (insn)))
ca6c03ca
JH
143 LABEL_NUSES (JUMP_LABEL (insn))--;
144
145 /* Also if deleting an insn that references a label. */
9295a326
JZ
146 else
147 {
148 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
4b4bf941 149 && LABEL_P (XEXP (note, 0)))
9295a326
JZ
150 {
151 LABEL_NUSES (XEXP (note, 0))--;
152 remove_note (insn, note);
153 }
154 }
ca6c03ca 155
4b4bf941 156 if (JUMP_P (insn)
ca6c03ca
JH
157 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
158 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
159 {
160 rtx pat = PATTERN (insn);
161 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
162 int len = XVECLEN (pat, diff_vec_p);
163 int i;
164
165 for (i = 0; i < len; i++)
a124fcda
RH
166 {
167 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
168
169 /* When deleting code in bulk (e.g. removing many unreachable
170 blocks) we can delete a label that's a target of the vector
171 before deleting the vector itself. */
4b4bf941 172 if (!NOTE_P (label))
a124fcda
RH
173 LABEL_NUSES (label)--;
174 }
ca6c03ca
JH
175 }
176
177 return next;
178}
179
3dec4024
JH
180/* Like delete_insn but also purge dead edges from BB. */
181rtx
d329e058 182delete_insn_and_edges (rtx insn)
3dec4024
JH
183{
184 rtx x;
185 bool purge = false;
186
ba4f7968 187 if (INSN_P (insn)
3dec4024 188 && BLOCK_FOR_INSN (insn)
a813c111 189 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024
JH
190 purge = true;
191 x = delete_insn (insn);
192 if (purge)
193 purge_dead_edges (BLOCK_FOR_INSN (insn));
194 return x;
195}
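
/* Illustrative sketch (not part of the original file): when the insn being
   removed ends its basic block, delete_insn_and_edges above is preferred
   over plain delete_insn so that edges made dead by the removal are purged
   right away.  Wrapped in "#if 0" because it is an example only.  */
#if 0
static void
example_remove_trailing_jump (basic_block bb)
{
  rtx end = BB_END (bb);

  /* Only a jump that does nothing but jump can be removed safely here.  */
  if (JUMP_P (end) && onlyjump_p (end))
    delete_insn_and_edges (end);
}
#endif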
196
ca6c03ca
JH
197/* Unlink a chain of insns between START and FINISH, leaving notes
198 that must be paired. */
199
200void
d329e058 201delete_insn_chain (rtx start, rtx finish)
ca6c03ca 202{
ca6c03ca
JH
203 rtx next;
204
5f0d2358
RK
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
ca6c03ca
JH
208 while (1)
209 {
210 next = NEXT_INSN (start);
4b4bf941 211 if (NOTE_P (start) && !can_delete_note_p (start))
ca6c03ca
JH
212 ;
213 else
214 next = delete_insn (start);
215
216 if (start == finish)
217 break;
218 start = next;
219 }
220}
3dec4024
JH
221
/* Like delete_insn_chain but also purge dead edges from BB.  */
223void
d329e058 224delete_insn_chain_and_edges (rtx first, rtx last)
3dec4024
JH
225{
226 bool purge = false;
227
ba4f7968 228 if (INSN_P (last)
3dec4024 229 && BLOCK_FOR_INSN (last)
a813c111 230 && BB_END (BLOCK_FOR_INSN (last)) == last)
3dec4024
JH
231 purge = true;
232 delete_insn_chain (first, last);
233 if (purge)
234 purge_dead_edges (BLOCK_FOR_INSN (last));
235}
ca6c03ca 236\f
/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction;
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block we should be put
   after.  */
ca6c03ca
JH
244
245basic_block
d329e058 246create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
ca6c03ca
JH
247{
248 basic_block bb;
249
250 if (bb_note
ca6c03ca
JH
251 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
252 && bb->aux == NULL)
253 {
254 /* If we found an existing note, thread it back onto the chain. */
255
256 rtx after;
257
4b4bf941 258 if (LABEL_P (head))
ca6c03ca
JH
259 after = head;
260 else
261 {
262 after = PREV_INSN (head);
263 head = bb_note;
264 }
265
266 if (after != bb_note && NEXT_INSN (after) != bb_note)
ba4f7968 267 reorder_insns_nobb (bb_note, bb_note, after);
ca6c03ca
JH
268 }
269 else
270 {
271 /* Otherwise we must create a note and a basic block structure. */
272
273 bb = alloc_block ();
274
5e2d947c 275 init_rtl_bb_info (bb);
ca6c03ca 276 if (!head && !end)
5f0d2358
RK
277 head = end = bb_note
278 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
4b4bf941 279 else if (LABEL_P (head) && end)
ca6c03ca
JH
280 {
281 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
282 if (head == end)
283 end = bb_note;
284 }
285 else
286 {
287 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
288 head = bb_note;
289 if (!end)
290 end = head;
291 }
5f0d2358 292
ca6c03ca
JH
293 NOTE_BASIC_BLOCK (bb_note) = bb;
294 }
295
296 /* Always include the bb note in the block. */
297 if (NEXT_INSN (end) == bb_note)
298 end = bb_note;
299
a813c111
SB
300 BB_HEAD (bb) = head;
301 BB_END (bb) = end;
852c6ec7 302 bb->index = last_basic_block++;
5e2d947c 303 bb->flags = BB_NEW | BB_RTL;
918ed612 304 link_block (bb, after);
68f9b844 305 SET_BASIC_BLOCK (bb->index, bb);
ba4f7968 306 update_bb_for_insn (bb);
076c7ab8 307 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
ca6c03ca
JH
308
309 /* Tag the block so that we know it has been used when considering
310 other basic block notes. */
311 bb->aux = bb;
312
313 return bb;
314}
315
/* Create a new basic block consisting of the instructions between HEAD and
   END and place it on the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */
ca6c03ca 320
bc35512f
JH
321static basic_block
322rtl_create_basic_block (void *headp, void *endp, basic_block after)
ca6c03ca 323{
bc35512f 324 rtx head = headp, end = endp;
ca6c03ca 325 basic_block bb;
0b17ab2f 326
7eca0767 327 /* Grow the basic block array if needed. */
68f9b844 328 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
7eca0767
JH
329 {
330 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
a590ac65 331 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
7eca0767 332 }
0b17ab2f 333
bf77398c 334 n_basic_blocks++;
ca6c03ca 335
852c6ec7 336 bb = create_basic_block_structure (head, end, NULL, after);
ca6c03ca
JH
337 bb->aux = NULL;
338 return bb;
339}
bc35512f
JH
340
341static basic_block
342cfg_layout_create_basic_block (void *head, void *end, basic_block after)
343{
344 basic_block newbb = rtl_create_basic_block (head, end, after);
345
bc35512f
JH
346 return newbb;
347}
ca6c03ca
JH
348\f
349/* Delete the insns in a (non-live) block. We physically delete every
350 non-deleted-note insn, and update the flow graph appropriately.
351
352 Return nonzero if we deleted an exception handler. */
353
354/* ??? Preserving all such notes strikes me as wrong. It would be nice
355 to post-process the stream to remove empty blocks, loops, ranges, etc. */
356
f0fda11c 357static void
d329e058 358rtl_delete_block (basic_block b)
ca6c03ca 359{
96370780 360 rtx insn, end;
ca6c03ca
JH
361
362 /* If the head of this block is a CODE_LABEL, then it might be the
f39e46ba
SB
363 label for an exception handler which can't be reached. We need
364 to remove the label from the exception_handler_label list. */
a813c111 365 insn = BB_HEAD (b);
4b4bf941 366 if (LABEL_P (insn))
ca6c03ca
JH
367 maybe_remove_eh_handler (insn);
368
96370780 369 end = get_last_bb_insn (b);
ca6c03ca
JH
370
371 /* Selectively delete the entire chain. */
a813c111 372 BB_HEAD (b) = NULL;
ca6c03ca 373 delete_insn_chain (insn, end);
370adb7b
JH
374 if (b->il.rtl->global_live_at_start)
375 {
376 FREE_REG_SET (b->il.rtl->global_live_at_start);
377 FREE_REG_SET (b->il.rtl->global_live_at_end);
378 b->il.rtl->global_live_at_start = NULL;
379 b->il.rtl->global_live_at_end = NULL;
380 }
ca6c03ca
JH
381}
382\f
852c6ec7 383/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
384
385void
d329e058 386compute_bb_for_insn (void)
ca6c03ca 387{
e0082a72 388 basic_block bb;
ca6c03ca 389
e0082a72 390 FOR_EACH_BB (bb)
ca6c03ca 391 {
a813c111 392 rtx end = BB_END (bb);
5f0d2358 393 rtx insn;
ca6c03ca 394
a813c111 395 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 396 {
ba4f7968 397 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
398 if (insn == end)
399 break;
ca6c03ca
JH
400 }
401 }
402}
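
/* Illustrative sketch (not part of the original file): once
   compute_bb_for_insn has run, BLOCK_FOR_INSN gives constant-time
   insn-to-block lookup; the check below mirrors the one used by
   delete_insn_and_edges above.  Wrapped in "#if 0" because it is an
   example only.  */
#if 0
static bool
example_insn_ends_its_block_p (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  return bb != NULL && BB_END (bb) == insn;
}
#endif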
403
404/* Release the basic_block_for_insn array. */
405
c2924966 406unsigned int
d329e058 407free_bb_for_insn (void)
ca6c03ca 408{
ba4f7968
JH
409 rtx insn;
410 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 411 if (!BARRIER_P (insn))
ba4f7968 412 BLOCK_FOR_INSN (insn) = NULL;
c2924966 413 return 0;
ca6c03ca
JH
414}
415
ef330312
PB
416struct tree_opt_pass pass_free_cfg =
417{
418 NULL, /* name */
419 NULL, /* gate */
420 free_bb_for_insn, /* execute */
421 NULL, /* sub */
422 NULL, /* next */
423 0, /* static_pass_number */
424 0, /* tv_id */
425 0, /* properties_required */
426 0, /* properties_provided */
427 PROP_cfg, /* properties_destroyed */
428 0, /* todo_flags_start */
429 0, /* todo_flags_finish */
430 0 /* letter */
431};
432
91278841
AP
433/* Return RTX to emit after when we want to emit code on the entry of function. */
434rtx
435entry_of_function (void)
436{
c22cacf3 437 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
24bd1a0b 438 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
91278841
AP
439}
440
11b904a1
BS
441/* Emit INSN at the entry point of the function, ensuring that it is only
442 executed once per function. */
443void
444emit_insn_at_entry (rtx insn)
445{
446 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
447 edge e = ei_safe_edge (ei);
5419bc7f 448 gcc_assert (e->flags & EDGE_FALLTHRU);
11b904a1
BS
449
450 insert_insn_on_edge (insn, e);
451 commit_edge_insertions ();
452}
453
ca6c03ca
JH
/* Update the block membership (BLOCK_FOR_INSN) of the insns within BB.  */
455
456void
d329e058 457update_bb_for_insn (basic_block bb)
ca6c03ca
JH
458{
459 rtx insn;
460
a813c111 461 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 462 {
4b4bf941 463 if (!BARRIER_P (insn))
bcc53e2a 464 set_block_for_insn (insn, bb);
a813c111 465 if (insn == BB_END (bb))
ca6c03ca
JH
466 break;
467 }
468}
ca6c03ca 469\f
f470c378
ZD
470/* Creates a new basic block just after basic block B by splitting
471 everything after specified instruction I. */
ca6c03ca 472
f470c378 473static basic_block
d329e058 474rtl_split_block (basic_block bb, void *insnp)
ca6c03ca
JH
475{
476 basic_block new_bb;
9ee634e3 477 rtx insn = insnp;
f470c378 478 edge e;
628f6a4e 479 edge_iterator ei;
ca6c03ca 480
f470c378
ZD
481 if (!insn)
482 {
483 insn = first_insn_after_basic_block_note (bb);
484
485 if (insn)
486 insn = PREV_INSN (insn);
487 else
488 insn = get_last_insn ();
489 }
490
  /* We probably should check the type of the insn so that we do not create
     an inconsistent CFG.  It is checked in verify_flow_info anyway, so do
     not bother.  */
494 if (insn == BB_END (bb))
495 emit_note_after (NOTE_INSN_DELETED, insn);
ca6c03ca
JH
496
497 /* Create the new basic block. */
a813c111 498 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
076c7ab8 499 BB_COPY_PARTITION (new_bb, bb);
a813c111 500 BB_END (bb) = insn;
ca6c03ca
JH
501
502 /* Redirect the outgoing edges. */
628f6a4e
BE
503 new_bb->succs = bb->succs;
504 bb->succs = NULL;
505 FOR_EACH_EDGE (e, ei, new_bb->succs)
ca6c03ca
JH
506 e->src = new_bb;
507
5e2d947c 508 if (bb->il.rtl->global_live_at_start)
ca6c03ca 509 {
5e2d947c
JH
510 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
511 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
512 COPY_REG_SET (new_bb->il.rtl->global_live_at_end, bb->il.rtl->global_live_at_end);
ca6c03ca
JH
513
514 /* We now have to calculate which registers are live at the end
515 of the split basic block and at the start of the new basic
516 block. Start with those registers that are known to be live
517 at the end of the original basic block and get
518 propagate_block to determine which registers are live. */
5e2d947c
JH
519 COPY_REG_SET (new_bb->il.rtl->global_live_at_start, bb->il.rtl->global_live_at_end);
520 propagate_block (new_bb, new_bb->il.rtl->global_live_at_start, NULL, NULL, 0);
521 COPY_REG_SET (bb->il.rtl->global_live_at_end,
522 new_bb->il.rtl->global_live_at_start);
0a2ed1f1
JH
523#ifdef HAVE_conditional_execution
524 /* In the presence of conditional execution we are not able to update
525 liveness precisely. */
526 if (reload_completed)
527 {
528 bb->flags |= BB_DIRTY;
529 new_bb->flags |= BB_DIRTY;
530 }
531#endif
ca6c03ca
JH
532 }
533
f470c378 534 return new_bb;
bc35512f
JH
535}
536
ca6c03ca 537/* Blocks A and B are to be merged into a single block A. The insns
bc35512f 538 are already contiguous. */
ca6c03ca 539
bc35512f
JH
540static void
541rtl_merge_blocks (basic_block a, basic_block b)
ca6c03ca 542{
a813c111 543 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
ca6c03ca
JH
544 rtx del_first = NULL_RTX, del_last = NULL_RTX;
545 int b_empty = 0;
546
547 /* If there was a CODE_LABEL beginning B, delete it. */
4b4bf941 548 if (LABEL_P (b_head))
ca6c03ca 549 {
2c97f8e4
RH
550 /* This might have been an EH label that no longer has incoming
551 EH edges. Update data structures to match. */
552 maybe_remove_eh_handler (b_head);
c22cacf3 553
ca6c03ca
JH
554 /* Detect basic blocks with nothing but a label. This can happen
555 in particular at the end of a function. */
556 if (b_head == b_end)
557 b_empty = 1;
5f0d2358 558
ca6c03ca
JH
559 del_first = del_last = b_head;
560 b_head = NEXT_INSN (b_head);
561 }
562
5f0d2358
RK
563 /* Delete the basic block note and handle blocks containing just that
564 note. */
ca6c03ca
JH
565 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
566 {
567 if (b_head == b_end)
568 b_empty = 1;
569 if (! del_last)
570 del_first = b_head;
5f0d2358 571
ca6c03ca
JH
572 del_last = b_head;
573 b_head = NEXT_INSN (b_head);
574 }
575
576 /* If there was a jump out of A, delete it. */
4b4bf941 577 if (JUMP_P (a_end))
ca6c03ca
JH
578 {
579 rtx prev;
580
581 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
4b4bf941 582 if (!NOTE_P (prev)
ca6c03ca 583 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
a813c111 584 || prev == BB_HEAD (a))
ca6c03ca
JH
585 break;
586
587 del_first = a_end;
588
589#ifdef HAVE_cc0
590 /* If this was a conditional jump, we need to also delete
591 the insn that set cc0. */
592 if (only_sets_cc0_p (prev))
593 {
594 rtx tmp = prev;
5f0d2358 595
ca6c03ca
JH
596 prev = prev_nonnote_insn (prev);
597 if (!prev)
a813c111 598 prev = BB_HEAD (a);
ca6c03ca
JH
599 del_first = tmp;
600 }
601#endif
602
603 a_end = PREV_INSN (del_first);
604 }
4b4bf941 605 else if (BARRIER_P (NEXT_INSN (a_end)))
ca6c03ca
JH
606 del_first = NEXT_INSN (a_end);
607
ca6c03ca
JH
608 /* Delete everything marked above as well as crap that might be
609 hanging out between the two blocks. */
f470c378 610 BB_HEAD (b) = NULL;
ca6c03ca
JH
611 delete_insn_chain (del_first, del_last);
612
613 /* Reassociate the insns of B with A. */
614 if (!b_empty)
615 {
ba4f7968 616 rtx x;
5f0d2358 617
ba4f7968
JH
618 for (x = a_end; x != b_end; x = NEXT_INSN (x))
619 set_block_for_insn (x, a);
5f0d2358 620
ba4f7968 621 set_block_for_insn (b_end, a);
5f0d2358 622
ca6c03ca
JH
623 a_end = b_end;
624 }
5f0d2358 625
a813c111 626 BB_END (a) = a_end;
5e2d947c 627 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
ca6c03ca 628}
bc35512f
JH
629
/* Return true when blocks A and B can be merged.  */
static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
633{
750054a2
CT
634 /* If we are partitioning hot/cold basic blocks, we don't want to
635 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
636 and cold sections.
637
8e8d5162 638 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
639 be optimizable (or blocks that appear to be mergeable), but which really
640 must be left untouched (they are required to make it safely across
641 partition boundaries). See the comments at the top of
8e8d5162 642 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
076c7ab8 643
87c8b4be 644 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 645 return false;
750054a2 646
bc35512f 647 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
648 return (single_succ_p (a)
649 && single_succ (a) == b
650 && single_pred_p (b)
628f6a4e 651 && a != b
bc35512f 652 /* Must be simple edge. */
c5cbcccf 653 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
bc35512f
JH
654 && a->next_bb == b
655 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
656 /* If the jump insn has side effects,
657 we can't kill the edge. */
4b4bf941 658 && (!JUMP_P (BB_END (a))
e24e7211 659 || (reload_completed
a813c111 660 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f 661}
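
/* Illustrative sketch (not part of the original file): callers normally go
   through the generic cfghooks.h wrappers rather than calling
   rtl_can_merge_blocks/rtl_merge_blocks directly; the names
   can_merge_blocks_p and merge_blocks below are assumed to be those
   wrappers.  Wrapped in "#if 0" because it is an example only.  */
#if 0
static void
example_merge_with_single_successor (basic_block a)
{
  basic_block b;

  if (!single_succ_p (a))
    return;

  b = single_succ (a);
  if (can_merge_blocks_p (a, b))
    merge_blocks (a, b);
}
#endif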
ca6c03ca 662\f
5f0d2358
RK
663/* Return the label in the head of basic block BLOCK. Create one if it doesn't
664 exist. */
ca6c03ca
JH
665
666rtx
d329e058 667block_label (basic_block block)
ca6c03ca
JH
668{
669 if (block == EXIT_BLOCK_PTR)
670 return NULL_RTX;
5f0d2358 671
4b4bf941 672 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 673 {
a813c111 674 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 675 }
5f0d2358 676
a813c111 677 return BB_HEAD (block);
ca6c03ca
JH
678}
679
/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This applies only if all edges now point to the same block.
   The parameters and return values are equivalent to
   redirect_edge_and_branch.  */
ca6c03ca 684
6de9cd9a 685edge
bc35512f 686try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
ca6c03ca
JH
687{
688 basic_block src = e->src;
a813c111 689 rtx insn = BB_END (src), kill_from;
e1233a7d 690 rtx set;
ca6c03ca 691 int fallthru = 0;
750054a2
CT
692
693 /* If we are partitioning hot/cold basic blocks, we don't want to
694 mess up unconditional or indirect jumps that cross between hot
8e8d5162
CT
695 and cold sections.
696
697 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
698 be optimizable (or blocks that appear to be mergeable), but which really
699 must be left untouched (they are required to make it safely across
700 partition boundaries). See the comments at the top of
8e8d5162 701 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
c22cacf3 702
87c8b4be
CT
703 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
704 || BB_PARTITION (src) != BB_PARTITION (target))
9cf84a3c 705 return NULL;
750054a2 706
6a66a8a7
KH
707 /* We can replace or remove a complex jump only when we have exactly
708 two edges. Also, if we have exactly one outgoing edge, we can
709 redirect that. */
710 if (EDGE_COUNT (src->succs) >= 3
711 /* Verify that all targets will be TARGET. Specifically, the
712 edge that is not E must also go to TARGET. */
713 || (EDGE_COUNT (src->succs) == 2
714 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
715 return NULL;
5f0d2358 716
6a66a8a7 717 if (!onlyjump_p (insn))
6de9cd9a 718 return NULL;
3348b696 719 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
6de9cd9a 720 return NULL;
ca6c03ca
JH
721
722 /* Avoid removing branch with side effects. */
723 set = single_set (insn);
724 if (!set || side_effects_p (set))
6de9cd9a 725 return NULL;
ca6c03ca
JH
726
727 /* In case we zap a conditional jump, we'll need to kill
728 the cc0 setter too. */
729 kill_from = insn;
730#ifdef HAVE_cc0
9caea4a7
RS
731 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
732 && only_sets_cc0_p (PREV_INSN (insn)))
ca6c03ca
JH
733 kill_from = PREV_INSN (insn);
734#endif
735
736 /* See if we can create the fallthru edge. */
bc35512f 737 if (in_cfglayout || can_fallthru (src, target))
ca6c03ca 738 {
c263766c
RH
739 if (dump_file)
740 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
ca6c03ca
JH
741 fallthru = 1;
742
eaec9b3d 743 /* Selectively unlink whole insn chain. */
bc35512f
JH
744 if (in_cfglayout)
745 {
370369e1 746 rtx insn = src->il.rtl->footer;
bc35512f 747
c22cacf3 748 delete_insn_chain (kill_from, BB_END (src));
bc35512f
JH
749
750 /* Remove barriers but keep jumptables. */
751 while (insn)
752 {
4b4bf941 753 if (BARRIER_P (insn))
bc35512f
JH
754 {
755 if (PREV_INSN (insn))
756 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
757 else
370369e1 758 src->il.rtl->footer = NEXT_INSN (insn);
bc35512f
JH
759 if (NEXT_INSN (insn))
760 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
761 }
4b4bf941 762 if (LABEL_P (insn))
bc35512f
JH
763 break;
764 insn = NEXT_INSN (insn);
765 }
766 }
767 else
c22cacf3 768 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
ca6c03ca 769 }
5f0d2358 770
ca6c03ca
JH
771 /* If this already is simplejump, redirect it. */
772 else if (simplejump_p (insn))
773 {
774 if (e->dest == target)
6de9cd9a 775 return NULL;
c263766c
RH
776 if (dump_file)
777 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
0b17ab2f 778 INSN_UID (insn), e->dest->index, target->index);
6ee3c8e4
JJ
779 if (!redirect_jump (insn, block_label (target), 0))
780 {
341c100f
NS
781 gcc_assert (target == EXIT_BLOCK_PTR);
782 return NULL;
6ee3c8e4 783 }
ca6c03ca 784 }
5f0d2358 785
6ee3c8e4
JJ
786 /* Cannot do anything for target exit block. */
787 else if (target == EXIT_BLOCK_PTR)
6de9cd9a 788 return NULL;
6ee3c8e4 789
ca6c03ca
JH
790 /* Or replace possibly complicated jump insn by simple jump insn. */
791 else
792 {
793 rtx target_label = block_label (target);
eb5b8ad4 794 rtx barrier, label, table;
ca6c03ca 795
a7102479 796 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
a813c111 797 JUMP_LABEL (BB_END (src)) = target_label;
ca6c03ca 798 LABEL_NUSES (target_label)++;
c263766c
RH
799 if (dump_file)
800 fprintf (dump_file, "Replacing insn %i by jump %i\n",
a813c111 801 INSN_UID (insn), INSN_UID (BB_END (src)));
ca6c03ca 802
4da2eb6b 803
ca6c03ca
JH
804 delete_insn_chain (kill_from, insn);
805
4da2eb6b
RH
806 /* Recognize a tablejump that we are converting to a
807 simple jump and remove its associated CODE_LABEL
808 and ADDR_VEC or ADDR_DIFF_VEC. */
3348b696 809 if (tablejump_p (insn, &label, &table))
4da2eb6b 810 delete_insn_chain (label, table);
eb5b8ad4 811
a813c111 812 barrier = next_nonnote_insn (BB_END (src));
4b4bf941 813 if (!barrier || !BARRIER_P (barrier))
a813c111 814 emit_barrier_after (BB_END (src));
5d693491
JZ
815 else
816 {
a813c111 817 if (barrier != NEXT_INSN (BB_END (src)))
5d693491
JZ
818 {
819 /* Move the jump before barrier so that the notes
820 which originally were or were created before jump table are
821 inside the basic block. */
a813c111 822 rtx new_insn = BB_END (src);
5d693491
JZ
823 rtx tmp;
824
a813c111 825 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
5d693491
JZ
826 tmp = NEXT_INSN (tmp))
827 set_block_for_insn (tmp, src);
828
829 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
830 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
831
832 NEXT_INSN (new_insn) = barrier;
833 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
834
835 PREV_INSN (new_insn) = PREV_INSN (barrier);
836 PREV_INSN (barrier) = new_insn;
837 }
838 }
ca6c03ca
JH
839 }
840
841 /* Keep only one edge out and set proper flags. */
c5cbcccf 842 if (!single_succ_p (src))
628f6a4e 843 remove_edge (e);
c5cbcccf 844 gcc_assert (single_succ_p (src));
628f6a4e 845
c5cbcccf 846 e = single_succ_edge (src);
ca6c03ca
JH
847 if (fallthru)
848 e->flags = EDGE_FALLTHRU;
849 else
850 e->flags = 0;
5f0d2358 851
ca6c03ca
JH
852 e->probability = REG_BR_PROB_BASE;
853 e->count = src->count;
854
ca6c03ca
JH
855 if (e->dest != target)
856 redirect_edge_succ (e, target);
5f0d2358 857
6de9cd9a 858 return e;
ca6c03ca
JH
859}
860
6de9cd9a
DN
/* Redirect the edge representing a branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
863static edge
bc35512f 864redirect_branch_edge (edge e, basic_block target)
ca6c03ca
JH
865{
866 rtx tmp;
a813c111 867 rtx old_label = BB_HEAD (e->dest);
ca6c03ca 868 basic_block src = e->src;
a813c111 869 rtx insn = BB_END (src);
ca6c03ca 870
ca6c03ca
JH
871 /* We can only redirect non-fallthru edges of jump insn. */
872 if (e->flags & EDGE_FALLTHRU)
6de9cd9a 873 return NULL;
4b4bf941 874 else if (!JUMP_P (insn))
6de9cd9a 875 return NULL;
ca6c03ca
JH
876
877 /* Recognize a tablejump and adjust all matching cases. */
e1233a7d 878 if (tablejump_p (insn, NULL, &tmp))
ca6c03ca
JH
879 {
880 rtvec vec;
881 int j;
882 rtx new_label = block_label (target);
883
6ee3c8e4 884 if (target == EXIT_BLOCK_PTR)
6de9cd9a 885 return NULL;
ca6c03ca
JH
886 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
887 vec = XVEC (PATTERN (tmp), 0);
888 else
889 vec = XVEC (PATTERN (tmp), 1);
890
891 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
892 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
893 {
894 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
895 --LABEL_NUSES (old_label);
896 ++LABEL_NUSES (new_label);
897 }
898
f9da5064 899 /* Handle casesi dispatch insns. */
ca6c03ca
JH
900 if ((tmp = single_set (insn)) != NULL
901 && SET_DEST (tmp) == pc_rtx
902 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
903 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
904 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
905 {
4c33cb26 906 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
ca6c03ca
JH
907 new_label);
908 --LABEL_NUSES (old_label);
909 ++LABEL_NUSES (new_label);
910 }
911 }
912 else
913 {
914 /* ?? We may play the games with moving the named labels from
915 one basic block to the other in case only one computed_jump is
916 available. */
5f0d2358
RK
917 if (computed_jump_p (insn)
918 /* A return instruction can't be redirected. */
919 || returnjump_p (insn))
6de9cd9a 920 return NULL;
ca6c03ca
JH
921
922 /* If the insn doesn't go where we think, we're confused. */
341c100f 923 gcc_assert (JUMP_LABEL (insn) == old_label);
6ee3c8e4
JJ
924
925 /* If the substitution doesn't succeed, die. This can happen
926 if the back end emitted unrecognizable instructions or if
927 target is exit block on some arches. */
928 if (!redirect_jump (insn, block_label (target), 0))
929 {
341c100f
NS
930 gcc_assert (target == EXIT_BLOCK_PTR);
931 return NULL;
6ee3c8e4 932 }
ca6c03ca
JH
933 }
934
c263766c
RH
935 if (dump_file)
936 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
0b17ab2f 937 e->src->index, e->dest->index, target->index);
5f0d2358 938
ca6c03ca 939 if (e->dest != target)
6de9cd9a
DN
940 e = redirect_edge_succ_nodup (e, target);
941 return e;
bc35512f
JH
942}
943
/* Attempt to change code to redirect edge E to TARGET.  Do not do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equivalent to
   TARGET; it should then try the simplifications and do nothing if none is
   possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure; we also return NULL when E already pointed to
   TARGET and we did not manage to simplify the instruction stream.  */
bc35512f 954
6de9cd9a 955static edge
5671bf27 956rtl_redirect_edge_and_branch (edge e, basic_block target)
bc35512f 957{
6de9cd9a 958 edge ret;
f345f21a
JH
959 basic_block src = e->src;
960
bc35512f 961 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 962 return NULL;
bc35512f 963
3348b696 964 if (e->dest == target)
6de9cd9a 965 return e;
3348b696 966
6de9cd9a 967 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
f345f21a
JH
968 {
969 src->flags |= BB_DIRTY;
6de9cd9a 970 return ret;
f345f21a 971 }
bc35512f 972
6de9cd9a
DN
973 ret = redirect_branch_edge (e, target);
974 if (!ret)
975 return NULL;
5f0d2358 976
f345f21a 977 src->flags |= BB_DIRTY;
6de9cd9a 978 return ret;
ca6c03ca
JH
979}
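
/* Illustrative sketch (not part of the original file): a typical caller
   first tries the cheap redirection through the generic cfghooks.h wrapper
   redirect_edge_and_branch and only then falls back to the forceful
   variant, which may create a new jump and basic block.  Wrapped in
   "#if 0" because it is an example only.  */
#if 0
static void
example_redirect_edge (edge e, basic_block target)
{
  if (!redirect_edge_and_branch (e, target))
    redirect_edge_and_branch_force (e, target);
}
#endif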
980
4fe9b91c 981/* Like force_nonfallthru below, but additionally performs redirection
ca6c03ca
JH
982 Used by redirect_edge_and_branch_force. */
983
9167e1c0 984static basic_block
d329e058 985force_nonfallthru_and_redirect (edge e, basic_block target)
ca6c03ca 986{
a3716585 987 basic_block jump_block, new_bb = NULL, src = e->src;
ca6c03ca
JH
988 rtx note;
989 edge new_edge;
a3716585 990 int abnormal_edge_flags = 0;
ca6c03ca 991
cb9a1d9b
JH
  /* In case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
cb9a1d9b 995 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
a813c111 996 && any_condjump_p (BB_END (e->src))
a813c111 997 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
cb9a1d9b
JH
998 {
999 rtx note;
58e6ae30 1000 edge b = unchecked_make_edge (e->src, target, 0);
341c100f 1001 bool redirected;
cb9a1d9b 1002
341c100f
NS
1003 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1004 gcc_assert (redirected);
c22cacf3 1005
a813c111 1006 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
cb9a1d9b
JH
1007 if (note)
1008 {
1009 int prob = INTVAL (XEXP (note, 0));
1010
1011 b->probability = prob;
1012 b->count = e->count * prob / REG_BR_PROB_BASE;
1013 e->probability -= e->probability;
1014 e->count -= b->count;
1015 if (e->probability < 0)
1016 e->probability = 0;
1017 if (e->count < 0)
1018 e->count = 0;
1019 }
1020 }
1021
ca6c03ca 1022 if (e->flags & EDGE_ABNORMAL)
a3716585
JH
1023 {
      /* Irritating special case: a fallthru edge to the same block as the
         abnormal edge.
         We can't redirect the abnormal edge, but we can still split the
         fallthru one and create a separate abnormal edge to the original
         destination.  This allows bb-reorder to make such an edge
         non-fallthru.  */
341c100f 1029 gcc_assert (e->dest == target);
a3716585
JH
1030 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1031 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1032 }
341c100f 1033 else
24c545ff 1034 {
341c100f
NS
1035 gcc_assert (e->flags & EDGE_FALLTHRU);
1036 if (e->src == ENTRY_BLOCK_PTR)
1037 {
1038 /* We can't redirect the entry block. Create an empty block
628f6a4e
BE
1039 at the start of the function which we use to add the new
1040 jump. */
1041 edge tmp;
1042 edge_iterator ei;
1043 bool found = false;
c22cacf3 1044
628f6a4e 1045 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
c22cacf3 1046
341c100f
NS
1047 /* Change the existing edge's source to be the new block, and add
1048 a new edge from the entry block to the new block. */
1049 e->src = bb;
628f6a4e
BE
1050 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1051 {
1052 if (tmp == e)
1053 {
865851d0 1054 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
628f6a4e
BE
1055 found = true;
1056 break;
1057 }
1058 else
1059 ei_next (&ei);
1060 }
c22cacf3 1061
628f6a4e 1062 gcc_assert (found);
c22cacf3 1063
d4e6fecb 1064 VEC_safe_push (edge, gc, bb->succs, e);
341c100f
NS
1065 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1066 }
24c545ff
BS
1067 }
1068
628f6a4e 1069 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
ca6c03ca
JH
1070 {
1071 /* Create the new structures. */
31a78298 1072
79019985
RH
1073 /* If the old block ended with a tablejump, skip its table
1074 by searching forward from there. Otherwise start searching
1075 forward from the last instruction of the old block. */
a813c111
SB
1076 if (!tablejump_p (BB_END (e->src), NULL, &note))
1077 note = BB_END (e->src);
31a78298
RH
1078 note = NEXT_INSN (note);
1079
31a78298 1080 jump_block = create_basic_block (note, NULL, e->src);
ca6c03ca
JH
1081 jump_block->count = e->count;
1082 jump_block->frequency = EDGE_FREQUENCY (e);
1083 jump_block->loop_depth = target->loop_depth;
1084
5e2d947c 1085 if (target->il.rtl->global_live_at_start)
ca6c03ca 1086 {
5e2d947c
JH
1087 jump_block->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1088 jump_block->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1089 COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
1090 target->il.rtl->global_live_at_start);
1091 COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
1092 target->il.rtl->global_live_at_start);
ca6c03ca
JH
1093 }
1094
750054a2
CT
1095 /* Make sure new block ends up in correct hot/cold section. */
1096
076c7ab8 1097 BB_COPY_PARTITION (jump_block, e->src);
9fb32434 1098 if (flag_reorder_blocks_and_partition
87c8b4be
CT
1099 && targetm.have_named_sections
1100 && JUMP_P (BB_END (jump_block))
1101 && !any_condjump_p (BB_END (jump_block))
1102 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1103 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
1104 NULL_RTX,
1105 REG_NOTES
1106 (BB_END
c22cacf3
MS
1107 (jump_block)));
1108
ca6c03ca
JH
1109 /* Wire edge in. */
1110 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1111 new_edge->probability = e->probability;
1112 new_edge->count = e->count;
1113
1114 /* Redirect old edge. */
1115 redirect_edge_pred (e, jump_block);
1116 e->probability = REG_BR_PROB_BASE;
1117
1118 new_bb = jump_block;
1119 }
1120 else
1121 jump_block = e->src;
5f0d2358 1122
ca6c03ca
JH
1123 e->flags &= ~EDGE_FALLTHRU;
1124 if (target == EXIT_BLOCK_PTR)
1125 {
cf22ce3c 1126#ifdef HAVE_return
a7102479 1127 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
cf22ce3c 1128#else
341c100f 1129 gcc_unreachable ();
cf22ce3c 1130#endif
ca6c03ca
JH
1131 }
1132 else
1133 {
1134 rtx label = block_label (target);
a7102479 1135 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
a813c111 1136 JUMP_LABEL (BB_END (jump_block)) = label;
ca6c03ca
JH
1137 LABEL_NUSES (label)++;
1138 }
5f0d2358 1139
a813c111 1140 emit_barrier_after (BB_END (jump_block));
ca6c03ca
JH
1141 redirect_edge_succ_nodup (e, target);
1142
a3716585
JH
1143 if (abnormal_edge_flags)
1144 make_edge (src, target, abnormal_edge_flags);
1145
ca6c03ca
JH
1146 return new_bb;
1147}
1148
1149/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1150 (and possibly create new basic block) to make edge non-fallthru.
1151 Return newly created BB or NULL if none. */
5f0d2358 1152
ca6c03ca 1153basic_block
d329e058 1154force_nonfallthru (edge e)
ca6c03ca
JH
1155{
1156 return force_nonfallthru_and_redirect (e, e->dest);
1157}
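
/* Illustrative sketch (not part of the original file): the same
   find-then-force pattern rtl_split_edge uses below.  A block has at most
   one fallthru predecessor, and forcing it makes the control transfer
   explicit.  Wrapped in "#if 0" because it is an example only.  */
#if 0
static void
example_force_nonfallthru_pred (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;

  /* E is NULL when the loop ran off the end without finding one.  */
  if (e)
    force_nonfallthru (e);
}
#endif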
1158
1159/* Redirect edge even at the expense of creating new jump insn or
1160 basic block. Return new basic block if created, NULL otherwise.
41806d92 1161 Conversion must be possible. */
ca6c03ca 1162
9ee634e3 1163static basic_block
d329e058 1164rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1165{
5f0d2358
RK
1166 if (redirect_edge_and_branch (e, target)
1167 || e->dest == target)
ca6c03ca
JH
1168 return NULL;
1169
1170 /* In case the edge redirection failed, try to force it to be non-fallthru
1171 and redirect newly created simplejump. */
c0c5d392 1172 e->src->flags |= BB_DIRTY;
5f0d2358 1173 return force_nonfallthru_and_redirect (e, target);
ca6c03ca
JH
1174}
1175
1176/* The given edge should potentially be a fallthru edge. If that is in
1177 fact true, delete the jump and barriers that are in the way. */
1178
f470c378
ZD
1179static void
1180rtl_tidy_fallthru_edge (edge e)
ca6c03ca
JH
1181{
1182 rtx q;
f470c378
ZD
1183 basic_block b = e->src, c = b->next_bb;
1184
ca6c03ca
JH
1185 /* ??? In a late-running flow pass, other folks may have deleted basic
1186 blocks by nopping out blocks, leaving multiple BARRIERs between here
0fa2e4df 1187 and the target label. They ought to be chastised and fixed.
ca6c03ca
JH
1188
1189 We can also wind up with a sequence of undeletable labels between
1190 one block and the next.
1191
1192 So search through a sequence of barriers, labels, and notes for
1193 the head of block C and assert that we really do fall through. */
1194
a813c111 1195 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
9c0a0632
RH
1196 if (INSN_P (q))
1197 return;
ca6c03ca
JH
1198
1199 /* Remove what will soon cease being the jump insn from the source block.
1200 If block B consisted only of this single jump, turn it into a deleted
1201 note. */
a813c111 1202 q = BB_END (b);
4b4bf941 1203 if (JUMP_P (q)
ca6c03ca
JH
1204 && onlyjump_p (q)
1205 && (any_uncondjump_p (q)
c5cbcccf 1206 || single_succ_p (b)))
ca6c03ca
JH
1207 {
1208#ifdef HAVE_cc0
1209 /* If this was a conditional jump, we need to also delete
1210 the insn that set cc0. */
1211 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1212 q = PREV_INSN (q);
1213#endif
1214
1215 q = PREV_INSN (q);
ca6c03ca
JH
1216 }
1217
1218 /* Selectively unlink the sequence. */
a813c111
SB
1219 if (q != PREV_INSN (BB_HEAD (c)))
1220 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
ca6c03ca
JH
1221
1222 e->flags |= EDGE_FALLTHRU;
1223}
ca6c03ca 1224\f
f470c378
ZD
1225/* Should move basic block BB after basic block AFTER. NIY. */
1226
1227static bool
1228rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1229 basic_block after ATTRIBUTE_UNUSED)
1230{
1231 return false;
1232}
1233
ca6c03ca 1234/* Split a (typically critical) edge. Return the new block.
41806d92 1235 The edge must not be abnormal.
ca6c03ca
JH
1236
1237 ??? The code generally expects to be called on critical edges.
1238 The case of a block ending in an unconditional jump to a
1239 block with multiple predecessors is not handled optimally. */
1240
8ce33230 1241static basic_block
d329e058 1242rtl_split_edge (edge edge_in)
ca6c03ca
JH
1243{
1244 basic_block bb;
ca6c03ca
JH
1245 rtx before;
1246
1247 /* Abnormal edges cannot be split. */
341c100f 1248 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
ca6c03ca
JH
1249
  /* We are going to place the new block in front of the edge's destination.
     Avoid the existence of fallthru predecessors.  */
ca6c03ca
JH
1252 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1253 {
1254 edge e;
628f6a4e 1255 edge_iterator ei;
5f0d2358 1256
628f6a4e 1257 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
ca6c03ca
JH
1258 if (e->flags & EDGE_FALLTHRU)
1259 break;
1260
1261 if (e)
1262 force_nonfallthru (e);
1263 }
1264
96e82e0a
ZD
1265 /* Create the basic block note. */
1266 if (edge_in->dest != EXIT_BLOCK_PTR)
a813c111 1267 before = BB_HEAD (edge_in->dest);
ca6c03ca
JH
1268 else
1269 before = NULL_RTX;
1270
623a66fa
R
  /* If this is a fallthru edge to the exit block, the blocks might not be
     adjacent, and the right place is after the source.  */
1273 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1274 {
1275 before = NEXT_INSN (BB_END (edge_in->src));
623a66fa 1276 bb = create_basic_block (before, NULL, edge_in->src);
076c7ab8 1277 BB_COPY_PARTITION (bb, edge_in->src);
623a66fa
R
1278 }
1279 else
9fb32434
CT
1280 {
1281 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
076c7ab8
ZW
1282 /* ??? Why not edge_in->dest->prev_bb here? */
1283 BB_COPY_PARTITION (bb, edge_in->dest);
9fb32434 1284 }
ca6c03ca
JH
1285
1286 /* ??? This info is likely going to be out of date very soon. */
5e2d947c 1287 if (edge_in->dest->il.rtl->global_live_at_start)
ca6c03ca 1288 {
5e2d947c
JH
1289 bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1290 bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1291 COPY_REG_SET (bb->il.rtl->global_live_at_start,
1292 edge_in->dest->il.rtl->global_live_at_start);
1293 COPY_REG_SET (bb->il.rtl->global_live_at_end,
1294 edge_in->dest->il.rtl->global_live_at_start);
ca6c03ca
JH
1295 }
1296
4977bab6 1297 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
ca6c03ca 1298
4d6922ee 1299 /* For non-fallthru edges, we must adjust the predecessor's
ca6c03ca
JH
1300 jump instruction to target our new block. */
1301 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1302 {
341c100f
NS
1303 edge redirected = redirect_edge_and_branch (edge_in, bb);
1304 gcc_assert (redirected);
ca6c03ca
JH
1305 }
1306 else
1307 redirect_edge_succ (edge_in, bb);
1308
1309 return bb;
1310}
1311
1312/* Queue instructions for insertion on an edge between two basic blocks.
1313 The new instructions and basic blocks (if any) will not appear in the
1314 CFG until commit_edge_insertions is called. */
1315
1316void
d329e058 1317insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1318{
1319 /* We cannot insert instructions on an abnormal critical edge.
1320 It will be easier to find the culprit if we die now. */
341c100f 1321 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1322
6de9cd9a 1323 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1324 start_sequence ();
1325 else
6de9cd9a 1326 push_to_sequence (e->insns.r);
ca6c03ca
JH
1327
1328 emit_insn (pattern);
1329
6de9cd9a 1330 e->insns.r = get_insns ();
ca6c03ca
JH
1331 end_sequence ();
1332}
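
/* Illustrative sketch (not part of the original file): queue a copy of
   PATTERN on every suitable successor edge of BB, then materialize the
   code.  The new insns (and any blocks created by edge splitting) appear
   in the CFG only after commit_edge_insertions.  Wrapped in "#if 0"
   because it is an example only.  */
#if 0
static void
example_instrument_succ_edges (basic_block bb, rtx pattern)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)))
      insert_insn_on_edge (copy_rtx (pattern), e);

  commit_edge_insertions ();
}
#endif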
1333
1334/* Update the CFG for the instructions queued on edge E. */
1335
1336static void
2ac66157 1337commit_one_edge_insertion (edge e)
ca6c03ca
JH
1338{
1339 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
eae4bc56 1340 basic_block bb = NULL;
ca6c03ca
JH
1341
1342 /* Pull the insns off the edge now since the edge might go away. */
6de9cd9a
DN
1343 insns = e->insns.r;
1344 e->insns.r = NULL_RTX;
ca6c03ca 1345
3dec4024 1346 if (!before && !after)
ca6c03ca 1347 {
3dec4024 1348 /* Figure out where to put these things. If the destination has
c22cacf3 1349 one predecessor, insert there. Except for the exit block. */
c5cbcccf 1350 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
ca6c03ca 1351 {
3dec4024
JH
1352 bb = e->dest;
1353
1354 /* Get the location correct wrt a code label, and "nice" wrt
1355 a basic block note, and before everything else. */
a813c111 1356 tmp = BB_HEAD (bb);
4b4bf941 1357 if (LABEL_P (tmp))
3dec4024
JH
1358 tmp = NEXT_INSN (tmp);
1359 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1360 tmp = NEXT_INSN (tmp);
a813c111 1361 if (tmp == BB_HEAD (bb))
3dec4024
JH
1362 before = tmp;
1363 else if (tmp)
1364 after = PREV_INSN (tmp);
1365 else
1366 after = get_last_insn ();
1367 }
1368
1369 /* If the source has one successor and the edge is not abnormal,
c22cacf3 1370 insert there. Except for the entry block. */
3dec4024 1371 else if ((e->flags & EDGE_ABNORMAL) == 0
c5cbcccf 1372 && single_succ_p (e->src)
3dec4024
JH
1373 && e->src != ENTRY_BLOCK_PTR)
1374 {
1375 bb = e->src;
1376
1377 /* It is possible to have a non-simple jump here. Consider a target
1378 where some forms of unconditional jumps clobber a register. This
1379 happens on the fr30 for example.
1380
1381 We know this block has a single successor, so we can just emit
1382 the queued insns before the jump. */
4b4bf941 1383 if (JUMP_P (BB_END (bb)))
96e82e0a 1384 before = BB_END (bb);
3dec4024
JH
1385 else
1386 {
341c100f
NS
1387 /* We'd better be fallthru, or we've lost track of
1388 what's what. */
1389 gcc_assert (e->flags & EDGE_FALLTHRU);
ca6c03ca 1390
a813c111 1391 after = BB_END (bb);
3dec4024
JH
1392 }
1393 }
1394 /* Otherwise we must split the edge. */
1395 else
1396 {
1397 bb = split_edge (e);
a813c111 1398 after = BB_END (bb);
750054a2 1399
750054a2 1400 if (flag_reorder_blocks_and_partition
9fb32434 1401 && targetm.have_named_sections
750054a2 1402 && e->src != ENTRY_BLOCK_PTR
076c7ab8 1403 && BB_PARTITION (e->src) == BB_COLD_PARTITION
bd454efd 1404 && !(e->flags & EDGE_CROSSING))
750054a2 1405 {
87c8b4be 1406 rtx bb_note, cur_insn;
750054a2
CT
1407
1408 bb_note = NULL_RTX;
1409 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1410 cur_insn = NEXT_INSN (cur_insn))
4b4bf941 1411 if (NOTE_P (cur_insn)
750054a2
CT
1412 && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
1413 {
1414 bb_note = cur_insn;
1415 break;
1416 }
1417
4b4bf941 1418 if (JUMP_P (BB_END (bb))
750054a2 1419 && !any_condjump_p (BB_END (bb))
c22cacf3
MS
1420 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1421 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
750054a2 1422 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
750054a2 1423 }
ca6c03ca
JH
1424 }
1425 }
1426
ca6c03ca
JH
1427 /* Now that we've found the spot, do the insertion. */
1428
1429 if (before)
1430 {
a7102479 1431 emit_insn_before_noloc (insns, before);
ca6c03ca
JH
1432 last = prev_nonnote_insn (before);
1433 }
1434 else
a7102479 1435 last = emit_insn_after_noloc (insns, after);
ca6c03ca
JH
1436
1437 if (returnjump_p (last))
1438 {
1439 /* ??? Remove all outgoing edges from BB and add one for EXIT.
c22cacf3
MS
1440 This is not currently a problem because this only happens
1441 for the (single) epilogue, which already has a fallthru edge
1442 to EXIT. */
ca6c03ca 1443
c5cbcccf 1444 e = single_succ_edge (bb);
341c100f 1445 gcc_assert (e->dest == EXIT_BLOCK_PTR
c5cbcccf 1446 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
ca6c03ca 1447
5f0d2358 1448 e->flags &= ~EDGE_FALLTHRU;
ca6c03ca 1449 emit_barrier_after (last);
0b17ab2f 1450
ca6c03ca
JH
1451 if (before)
1452 delete_insn (before);
1453 }
341c100f
NS
1454 else
1455 gcc_assert (!JUMP_P (last));
5f0d2358 1456
12eff7b7 1457 /* Mark the basic block for find_many_sub_basic_blocks. */
05549c96
SB
1458 if (current_ir_type () != IR_RTL_CFGLAYOUT)
1459 bb->aux = &bb->aux;
ca6c03ca
JH
1460}
1461
1462/* Update the CFG for all queued instructions. */
1463
1464void
d329e058 1465commit_edge_insertions (void)
ca6c03ca 1466{
ca6c03ca 1467 basic_block bb;
9dca2ad5 1468 sbitmap blocks;
9809a362 1469 bool changed = false;
ca6c03ca
JH
1470
1471#ifdef ENABLE_CHECKING
1472 verify_flow_info ();
1473#endif
1474
e0082a72 1475 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
ca6c03ca 1476 {
628f6a4e
BE
1477 edge e;
1478 edge_iterator ei;
ca6c03ca 1479
628f6a4e
BE
1480 FOR_EACH_EDGE (e, ei, bb->succs)
1481 if (e->insns.r)
1482 {
1483 changed = true;
2ac66157 1484 commit_one_edge_insertion (e);
628f6a4e 1485 }
3dec4024 1486 }
9dca2ad5 1487
9809a362
JH
1488 if (!changed)
1489 return;
1490
05549c96
SB
1491 /* In the old rtl CFG API, it was OK to insert control flow on an
1492 edge, apparently? In cfglayout mode, this will *not* work, and
1493 the caller is responsible for making sure that control flow is
1494 valid at all times. */
1495 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1496 return;
1497
9809a362
JH
1498 blocks = sbitmap_alloc (last_basic_block);
1499 sbitmap_zero (blocks);
1500 FOR_EACH_BB (bb)
1501 if (bb->aux)
1502 {
c22cacf3 1503 SET_BIT (blocks, bb->index);
9809a362
JH
1504 /* Check for forgotten bb->aux values before commit_edge_insertions
1505 call. */
341c100f 1506 gcc_assert (bb->aux == &bb->aux);
9809a362
JH
1507 bb->aux = NULL;
1508 }
1509 find_many_sub_basic_blocks (blocks);
1510 sbitmap_free (blocks);
ca6c03ca
JH
1511}
1512\f
f470c378
ZD
1513/* Print out RTL-specific basic block information (live information
1514 at start and end). */
ca6c03ca 1515
10e9fecc 1516static void
f470c378 1517rtl_dump_bb (basic_block bb, FILE *outf, int indent)
ca6c03ca
JH
1518{
1519 rtx insn;
1520 rtx last;
f470c378 1521 char *s_indent;
ca6c03ca 1522
400e39e3
KH
1523 s_indent = alloca ((size_t) indent + 1);
1524 memset (s_indent, ' ', (size_t) indent);
f470c378
ZD
1525 s_indent[indent] = '\0';
1526
1527 fprintf (outf, ";;%s Registers live at start: ", s_indent);
5e2d947c 1528 dump_regset (bb->il.rtl->global_live_at_start, outf);
ca6c03ca
JH
1529 putc ('\n', outf);
1530
a813c111 1531 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
ca6c03ca
JH
1532 insn = NEXT_INSN (insn))
1533 print_rtl_single (outf, insn);
1534
f470c378 1535 fprintf (outf, ";;%s Registers live at end: ", s_indent);
5e2d947c 1536 dump_regset (bb->il.rtl->global_live_at_end, outf);
ca6c03ca 1537 putc ('\n', outf);
ca6c03ca
JH
1538}
1539\f
1540/* Like print_rtl, but also print out live information for the start of each
1541 basic block. */
1542
1543void
d329e058 1544print_rtl_with_bb (FILE *outf, rtx rtx_first)
ca6c03ca 1545{
b3694847 1546 rtx tmp_rtx;
ca6c03ca
JH
1547
1548 if (rtx_first == 0)
1549 fprintf (outf, "(nil)\n");
1550 else
1551 {
ca6c03ca
JH
1552 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1553 int max_uid = get_max_uid ();
5ed6ace5
MD
1554 basic_block *start = XCNEWVEC (basic_block, max_uid);
1555 basic_block *end = XCNEWVEC (basic_block, max_uid);
1556 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
ca6c03ca 1557
e0082a72
ZD
1558 basic_block bb;
1559
1560 FOR_EACH_BB_REVERSE (bb)
ca6c03ca 1561 {
ca6c03ca
JH
1562 rtx x;
1563
a813c111
SB
1564 start[INSN_UID (BB_HEAD (bb))] = bb;
1565 end[INSN_UID (BB_END (bb))] = bb;
1566 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
ca6c03ca
JH
1567 {
1568 enum bb_state state = IN_MULTIPLE_BB;
5f0d2358 1569
ca6c03ca
JH
1570 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1571 state = IN_ONE_BB;
1572 in_bb_p[INSN_UID (x)] = state;
1573
a813c111 1574 if (x == BB_END (bb))
ca6c03ca
JH
1575 break;
1576 }
1577 }
1578
1579 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1580 {
1581 int did_output;
e9ec5c6b
SB
1582 edge_iterator ei;
1583 edge e;
ca6c03ca
JH
1584
1585 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1586 {
1587 fprintf (outf, ";; Start of basic block %d, registers live:",
0b17ab2f 1588 bb->index);
5e2d947c 1589 dump_regset (bb->il.rtl->global_live_at_start, outf);
ca6c03ca 1590 putc ('\n', outf);
e9ec5c6b
SB
1591 FOR_EACH_EDGE (e, ei, bb->preds)
1592 {
1593 fputs (";; Pred edge ", outf);
1594 dump_edge_info (outf, e, 0);
1595 fputc ('\n', outf);
1596 }
ca6c03ca
JH
1597 }
1598
1599 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
4b4bf941
JQ
1600 && !NOTE_P (tmp_rtx)
1601 && !BARRIER_P (tmp_rtx))
ca6c03ca
JH
1602 fprintf (outf, ";; Insn is not within a basic block\n");
1603 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1604 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1605
1606 did_output = print_rtl_single (outf, tmp_rtx);
1607
1608 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1609 {
e9ec5c6b 1610 fprintf (outf, ";; End of basic block %d, registers live:",
0b17ab2f 1611 bb->index);
5e2d947c 1612 dump_regset (bb->il.rtl->global_live_at_end, outf);
ca6c03ca 1613 putc ('\n', outf);
e9ec5c6b
SB
1614 FOR_EACH_EDGE (e, ei, bb->succs)
1615 {
1616 fputs (";; Succ edge ", outf);
1617 dump_edge_info (outf, e, 1);
1618 fputc ('\n', outf);
1619 }
ca6c03ca
JH
1620 }
1621
1622 if (did_output)
1623 putc ('\n', outf);
1624 }
1625
1626 free (start);
1627 free (end);
1628 free (in_bb_p);
1629 }
1630
1631 if (current_function_epilogue_delay_list != 0)
1632 {
1633 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1634 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1635 tmp_rtx = XEXP (tmp_rtx, 1))
1636 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1637 }
1638}
1639\f
b446e5a2 1640void
d329e058 1641update_br_prob_note (basic_block bb)
b446e5a2
JH
1642{
1643 rtx note;
4b4bf941 1644 if (!JUMP_P (BB_END (bb)))
b446e5a2 1645 return;
a813c111 1646 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
b446e5a2
JH
1647 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1648 return;
1649 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1650}
96370780
MK
1651
1652/* Get the last insn associated with block BB (that includes barriers and
1653 tablejumps after BB). */
1654rtx
1655get_last_bb_insn (basic_block bb)
1656{
1657 rtx tmp;
1658 rtx end = BB_END (bb);
1659
1660 /* Include any jump table following the basic block. */
1661 if (tablejump_p (end, NULL, &tmp))
1662 end = tmp;
1663
1664 /* Include any barriers that may follow the basic block. */
1665 tmp = next_nonnote_insn (end);
1666 while (tmp && BARRIER_P (tmp))
1667 {
1668 end = tmp;
1669 tmp = next_nonnote_insn (end);
1670 }
1671
1672 return end;
1673}
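
/* Hedged usage sketch (compiled out; not in the original file): shows the
   intended use of get_last_bb_insn when a pass needs the complete insn
   range owned by BB, including a trailing jump table and barriers.  The
   function name below is hypothetical.  */
#if 0
static int
count_insns_owned_by_bb_sketch (basic_block bb)
{
  rtx insn = BB_HEAD (bb);
  rtx last = get_last_bb_insn (bb);
  int count = 1;

  while (insn != last)
    {
      insn = NEXT_INSN (insn);
      count++;
    }
  return count;
}
#endif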
b446e5a2 1674\f
10e9fecc
JH
1675/* Verify the CFG and RTL consistency common for both underlying RTL and
1676 cfglayout RTL.
ca6c03ca
JH
1677
 1678 Currently it does the following checks:
1679
ca6c03ca 1680 - overlapping of basic blocks
9eab6785 1681 - insns with wrong BLOCK_FOR_INSN pointers
ca6c03ca 1682 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
f63d1bf7 1683 - tails of basic blocks (ensure that boundary is necessary)
ca6c03ca
JH
1684 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1685 and NOTE_INSN_BASIC_BLOCK
750054a2 1686 - verify that no fall_thru edge crosses hot/cold partition boundaries
9eab6785 1687 - verify that there are no pending RTL branch predictions
ca6c03ca
JH
1688
 1689 In the future it can be extended to check a lot of other stuff as well
 1690 (reachability of basic blocks, life information, etc.). */
f470c378 1691
10e9fecc 1692static int
d329e058 1693rtl_verify_flow_info_1 (void)
ca6c03ca 1694{
ca6c03ca 1695 rtx x;
10e9fecc 1696 int err = 0;
94eb5ddb 1697 basic_block bb;
ca6c03ca 1698
9eab6785 1699 /* Check the general integrity of the basic blocks. */
e0082a72 1700 FOR_EACH_BB_REVERSE (bb)
ca6c03ca 1701 {
9eab6785 1702 rtx insn;
5f0d2358 1703
5e2d947c
JH
1704 if (!(bb->flags & BB_RTL))
1705 {
1706 error ("BB_RTL flag not set for block %d", bb->index);
1707 err = 1;
1708 }
1709
9eab6785
SB
1710 FOR_BB_INSNS (bb, insn)
1711 if (BLOCK_FOR_INSN (insn) != bb)
1712 {
1713 error ("insn %d basic block pointer is %d, should be %d",
1714 INSN_UID (insn),
1715 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1716 bb->index);
1717 err = 1;
1718 }
ca6c03ca 1719
9eab6785 1720 if (bb->predictions)
ca6c03ca 1721 {
9eab6785 1722 error ("bb prediction set for block %d, but it is not used in RTL land", bb->index);
ca6c03ca
JH
1723 err = 1;
1724 }
ca6c03ca
JH
1725 }
1726
1727 /* Now check the basic blocks (boundaries etc.) */
e0082a72 1728 FOR_EACH_BB_REVERSE (bb)
ca6c03ca 1729 {
3dec4024 1730 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
3cf54412 1731 edge e, fallthru = NULL;
5a1a3e5e 1732 rtx note;
628f6a4e 1733 edge_iterator ei;
ca6c03ca 1734
2085a21f 1735 if (JUMP_P (BB_END (bb))
a813c111 1736 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
628f6a4e 1737 && EDGE_COUNT (bb->succs) >= 2
a813c111 1738 && any_condjump_p (BB_END (bb)))
5a1a3e5e 1739 {
e53de54d
JH
1740 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1741 && profile_status != PROFILE_ABSENT)
5a1a3e5e 1742 {
0108ae51 1743 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
5a1a3e5e
JH
1744 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1745 err = 1;
1746 }
1747 }
628f6a4e 1748 FOR_EACH_EDGE (e, ei, bb->succs)
ca6c03ca 1749 {
ca6c03ca 1750 if (e->flags & EDGE_FALLTHRU)
750054a2
CT
1751 {
1752 n_fallthru++, fallthru = e;
bd454efd 1753 if ((e->flags & EDGE_CROSSING)
076c7ab8 1754 || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
9fb32434
CT
1755 && e->src != ENTRY_BLOCK_PTR
1756 && e->dest != EXIT_BLOCK_PTR))
c22cacf3 1757 {
ab532386 1758 error ("fallthru edge crosses section boundary (bb %i)",
750054a2
CT
1759 e->src->index);
1760 err = 1;
1761 }
1762 }
3dec4024 1763
65f43cdf
ZD
1764 if ((e->flags & ~(EDGE_DFS_BACK
1765 | EDGE_CAN_FALLTHRU
1766 | EDGE_IRREDUCIBLE_LOOP
9beb1c84
CT
1767 | EDGE_LOOP_EXIT
1768 | EDGE_CROSSING)) == 0)
3dec4024
JH
1769 n_branch++;
1770
1771 if (e->flags & EDGE_ABNORMAL_CALL)
1772 n_call++;
1773
1774 if (e->flags & EDGE_EH)
1775 n_eh++;
1776 else if (e->flags & EDGE_ABNORMAL)
1777 n_abnormal++;
ca6c03ca 1778 }
5f0d2358 1779
a813c111
SB
1780 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1781 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
3dec4024 1782 {
ab532386 1783 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
3dec4024
JH
1784 err = 1;
1785 }
1786 if (n_branch
4b4bf941 1787 && (!JUMP_P (BB_END (bb))
a813c111
SB
1788 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1789 || any_condjump_p (BB_END (bb))))))
3dec4024 1790 {
ab532386 1791 error ("too many outgoing branch edges from bb %i", bb->index);
3dec4024
JH
1792 err = 1;
1793 }
a813c111 1794 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
3dec4024 1795 {
ab532386 1796 error ("fallthru edge after unconditional jump %i", bb->index);
3dec4024
JH
1797 err = 1;
1798 }
a813c111 1799 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
3dec4024 1800 {
ab532386 1801 error ("wrong number of branch edges after unconditional jump %i", bb->index);
3dec4024
JH
1802 err = 1;
1803 }
a813c111 1804 if (n_branch != 1 && any_condjump_p (BB_END (bb))
c11fd0b2 1805 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
3dec4024 1806 {
c11fd0b2
RG
 1807 error ("wrong number of branch edges after conditional jump %i",
1808 bb->index);
3dec4024
JH
1809 err = 1;
1810 }
4b4bf941 1811 if (n_call && !CALL_P (BB_END (bb)))
3dec4024 1812 {
ab532386 1813 error ("call edges for non-call insn in bb %i", bb->index);
3dec4024
JH
1814 err = 1;
1815 }
1816 if (n_abnormal
4b4bf941
JQ
1817 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1818 && (!JUMP_P (BB_END (bb))
a813c111
SB
1819 || any_condjump_p (BB_END (bb))
1820 || any_uncondjump_p (BB_END (bb))))
3dec4024 1821 {
ab532386 1822 error ("abnormal edges for no purpose in bb %i", bb->index);
3dec4024
JH
1823 err = 1;
1824 }
f87c27b4 1825
a813c111 1826 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
0ca541aa 1827 /* We may have a barrier inside a basic block before dead code
9524880c
HPN
1828 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1829 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
5f0d2358
RK
1830 {
1831 debug_rtx (x);
1832 if (! BLOCK_FOR_INSN (x))
1833 error
1834 ("insn %d inside basic block %d but block_for_insn is NULL",
0b17ab2f 1835 INSN_UID (x), bb->index);
5f0d2358
RK
1836 else
1837 error
1838 ("insn %d inside basic block %d but block_for_insn is %i",
0b17ab2f 1839 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
5f0d2358
RK
1840
1841 err = 1;
1842 }
ca6c03ca
JH
1843
 1844 /* OK, pointers are correct.  Now check the header of the basic
c22cacf3 1845 block.  It ought to contain an optional CODE_LABEL followed
ca6c03ca 1846 by NOTE_BASIC_BLOCK. */
a813c111 1847 x = BB_HEAD (bb);
4b4bf941 1848 if (LABEL_P (x))
ca6c03ca 1849 {
a813c111 1850 if (BB_END (bb) == x)
ca6c03ca
JH
1851 {
1852 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
0b17ab2f 1853 bb->index);
ca6c03ca
JH
1854 err = 1;
1855 }
5f0d2358 1856
ca6c03ca
JH
1857 x = NEXT_INSN (x);
1858 }
5f0d2358 1859
ca6c03ca
JH
1860 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1861 {
1862 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
0b17ab2f 1863 bb->index);
ca6c03ca
JH
1864 err = 1;
1865 }
1866
a813c111 1867 if (BB_END (bb) == x)
49243025 1868 /* Do checks for empty blocks here. */
5f0d2358 1869 ;
ca6c03ca 1870 else
5f0d2358
RK
1871 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1872 {
1873 if (NOTE_INSN_BASIC_BLOCK_P (x))
1874 {
1875 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
0b17ab2f 1876 INSN_UID (x), bb->index);
5f0d2358
RK
1877 err = 1;
1878 }
1879
a813c111 1880 if (x == BB_END (bb))
5f0d2358 1881 break;
ca6c03ca 1882
83fd323c 1883 if (control_flow_insn_p (x))
5f0d2358 1884 {
0b17ab2f 1885 error ("in basic block %d:", bb->index);
5f0d2358
RK
1886 fatal_insn ("flow control insn inside a basic block", x);
1887 }
1888 }
ca6c03ca
JH
1889 }
1890
10e9fecc 1891 /* Clean up. */
10e9fecc
JH
1892 return err;
1893}
5f0d2358 1894
10e9fecc
JH
1895/* Verify the CFG and RTL consistency common for both underlying RTL and
1896 cfglayout RTL.
5f0d2358 1897
10e9fecc
JH
 1898 Currently it does the following checks:
1899 - all checks of rtl_verify_flow_info_1
9eab6785 1900 - test head/end pointers
10e9fecc
JH
1901 - check that all insns are in the basic blocks
1902 (except the switch handling code, barriers and notes)
1903 - check that all returns are followed by barriers
 1904 - check that all fallthru edges point to the adjacent blocks. */
9eab6785 1905
10e9fecc 1906static int
d329e058 1907rtl_verify_flow_info (void)
10e9fecc
JH
1908{
1909 basic_block bb;
1910 int err = rtl_verify_flow_info_1 ();
1911 rtx x;
9eab6785
SB
1912 rtx last_head = get_last_insn ();
1913 basic_block *bb_info;
10e9fecc
JH
1914 int num_bb_notes;
1915 const rtx rtx_first = get_insns ();
1916 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
9eab6785
SB
1917 const int max_uid = get_max_uid ();
1918
1919 bb_info = XCNEWVEC (basic_block, max_uid);
ca6c03ca 1920
10e9fecc
JH
1921 FOR_EACH_BB_REVERSE (bb)
1922 {
1923 edge e;
628f6a4e 1924 edge_iterator ei;
9eab6785
SB
1925 rtx head = BB_HEAD (bb);
1926 rtx end = BB_END (bb);
628f6a4e 1927
9eab6785
SB
1928 /* Verify the end of the basic block is in the INSN chain. */
1929 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1930 if (x == end)
1931 break;
1932
1933 if (!x)
1934 {
1935 error ("end insn %d for block %d not found in the insn stream",
1936 INSN_UID (end), bb->index);
1937 err = 1;
1938 }
1939
1940 /* Work backwards from the end to the head of the basic block
1941 to verify the head is in the RTL chain. */
1942 for (; x != NULL_RTX; x = PREV_INSN (x))
a00d11f0 1943 {
9eab6785
SB
1944 /* While walking over the insn chain, verify insns appear
1945 in only one basic block. */
1946 if (bb_info[INSN_UID (x)] != NULL)
1947 {
1948 error ("insn %d is in multiple basic blocks (%d and %d)",
1949 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1950 err = 1;
1951 }
1952
1953 bb_info[INSN_UID (x)] = bb;
1954
1955 if (x == head)
1956 break;
1957 }
1958 if (!x)
1959 {
1960 error ("head insn %d for block %d not found in the insn stream",
1961 INSN_UID (head), bb->index);
a00d11f0
JH
1962 err = 1;
1963 }
1964
9eab6785
SB
1965 last_head = x;
1966
628f6a4e 1967 FOR_EACH_EDGE (e, ei, bb->succs)
10e9fecc
JH
1968 if (e->flags & EDGE_FALLTHRU)
1969 break;
1970 if (!e)
1971 {
1972 rtx insn;
1973
1974 /* Ensure existence of barrier in BB with no fallthru edges. */
4b4bf941 1975 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
10e9fecc
JH
1976 insn = NEXT_INSN (insn))
1977 if (!insn
4b4bf941 1978 || (NOTE_P (insn)
10e9fecc
JH
1979 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
1980 {
1981 error ("missing barrier after block %i", bb->index);
1982 err = 1;
1983 break;
1984 }
1985 }
1986 else if (e->src != ENTRY_BLOCK_PTR
1987 && e->dest != EXIT_BLOCK_PTR)
c22cacf3 1988 {
10e9fecc
JH
1989 rtx insn;
1990
1991 if (e->src->next_bb != e->dest)
1992 {
1993 error
1994 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1995 e->src->index, e->dest->index);
1996 err = 1;
1997 }
1998 else
a813c111 1999 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
10e9fecc 2000 insn = NEXT_INSN (insn))
6be85b25 2001 if (BARRIER_P (insn) || INSN_P (insn))
10e9fecc
JH
2002 {
2003 error ("verify_flow_info: Incorrect fallthru %i->%i",
2004 e->src->index, e->dest->index);
2005 fatal_insn ("wrong insn in the fallthru edge", insn);
2006 err = 1;
2007 }
c22cacf3 2008 }
10e9fecc 2009 }
ca6c03ca 2010
9eab6785
SB
2011 free (bb_info);
2012
ca6c03ca 2013 num_bb_notes = 0;
e0082a72
ZD
2014 last_bb_seen = ENTRY_BLOCK_PTR;
2015
5f0d2358 2016 for (x = rtx_first; x; x = NEXT_INSN (x))
ca6c03ca
JH
2017 {
2018 if (NOTE_INSN_BASIC_BLOCK_P (x))
2019 {
bf77398c 2020 bb = NOTE_BASIC_BLOCK (x);
5f0d2358 2021
ca6c03ca 2022 num_bb_notes++;
e0082a72 2023 if (bb != last_bb_seen->next_bb)
10e9fecc 2024 internal_error ("basic blocks not laid down consecutively");
ca6c03ca 2025
10e9fecc 2026 curr_bb = last_bb_seen = bb;
ca6c03ca
JH
2027 }
2028
10e9fecc 2029 if (!curr_bb)
ca6c03ca
JH
2030 {
2031 switch (GET_CODE (x))
2032 {
2033 case BARRIER:
2034 case NOTE:
2035 break;
2036
2037 case CODE_LABEL:
666c27b9 2038 /* An addr_vec is placed outside any basic block. */
ca6c03ca 2039 if (NEXT_INSN (x)
4b4bf941 2040 && JUMP_P (NEXT_INSN (x))
ca6c03ca
JH
2041 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2042 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5f0d2358 2043 x = NEXT_INSN (x);
ca6c03ca
JH
2044
2045 /* But in any case, non-deletable labels can appear anywhere. */
2046 break;
2047
2048 default:
1f978f5f 2049 fatal_insn ("insn outside basic block", x);
ca6c03ca
JH
2050 }
2051 }
2052
26cae194 2053 if (JUMP_P (x)
ca6c03ca 2054 && returnjump_p (x) && ! condjump_p (x)
4b4bf941 2055 && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
1f978f5f 2056 fatal_insn ("return not followed by barrier", x);
a813c111 2057 if (curr_bb && x == BB_END (curr_bb))
10e9fecc 2058 curr_bb = NULL;
ca6c03ca
JH
2059 }
2060
24bd1a0b 2061 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
ca6c03ca 2062 internal_error
0b17ab2f
RH
2063 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2064 num_bb_notes, n_basic_blocks);
ca6c03ca 2065
10e9fecc 2066 return err;
ca6c03ca
JH
2067}
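
/* Hedged note with a sketch (compiled out): neither verifier above is
   normally called directly.  They are reached through the cfg_hooks
   tables at the end of this file, so a pass simply calls
   verify_flow_info (), typically under ENABLE_CHECKING, after editing
   the CFG.  The wrapper name below is hypothetical.  */
#if 0
static void
check_after_cfg_edit_sketch (basic_block bb ATTRIBUTE_UNUSED)
{
  /* ... manipulate the edges or insns of BB here ...  */
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif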
2068\f
eaec9b3d 2069/* Assume that the preceding pass has possibly eliminated jump instructions
ca6c03ca
JH
 2070 or converted the unconditional jumps.  Eliminate the corresponding dead edges from the CFG.
2071 Return true if any edges are eliminated. */
2072
2073bool
d329e058 2074purge_dead_edges (basic_block bb)
ca6c03ca 2075{
628f6a4e 2076 edge e;
a813c111 2077 rtx insn = BB_END (bb), note;
ca6c03ca 2078 bool purged = false;
628f6a4e
BE
2079 bool found;
2080 edge_iterator ei;
ca6c03ca 2081
70da1d03 2082 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
4b4bf941 2083 if (NONJUMP_INSN_P (insn)
70da1d03
JH
2084 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2085 {
2086 rtx eqnote;
2087
2088 if (! may_trap_p (PATTERN (insn))
2089 || ((eqnote = find_reg_equal_equiv_note (insn))
2090 && ! may_trap_p (XEXP (eqnote, 0))))
2091 remove_note (insn, note);
2092 }
2093
546c093e 2094 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
628f6a4e 2095 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
546c093e 2096 {
e5f9a909
JW
2097 /* There are three types of edges we need to handle correctly here: EH
2098 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2099 latter can appear when nonlocal gotos are used. */
2100 if (e->flags & EDGE_EH)
546c093e 2101 {
e5f9a909
JW
2102 if (can_throw_internal (BB_END (bb))
2103 /* If this is a call edge, verify that this is a call insn. */
2104 && (! (e->flags & EDGE_ABNORMAL_CALL)
2105 || CALL_P (BB_END (bb))))
628f6a4e
BE
2106 {
2107 ei_next (&ei);
2108 continue;
2109 }
546c093e 2110 }
e5f9a909 2111 else if (e->flags & EDGE_ABNORMAL_CALL)
546c093e 2112 {
e5f9a909
JW
2113 if (CALL_P (BB_END (bb))
2114 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2115 || INTVAL (XEXP (note, 0)) >= 0))
628f6a4e
BE
2116 {
2117 ei_next (&ei);
2118 continue;
2119 }
546c093e
RH
2120 }
2121 else
628f6a4e
BE
2122 {
2123 ei_next (&ei);
2124 continue;
2125 }
546c093e
RH
2126
2127 remove_edge (e);
2128 bb->flags |= BB_DIRTY;
2129 purged = true;
2130 }
5f0d2358 2131
4b4bf941 2132 if (JUMP_P (insn))
ca6c03ca
JH
2133 {
2134 rtx note;
2135 edge b,f;
628f6a4e 2136 edge_iterator ei;
5f0d2358 2137
ca6c03ca
JH
2138 /* We do care only about conditional jumps and simplejumps. */
2139 if (!any_condjump_p (insn)
2140 && !returnjump_p (insn)
2141 && !simplejump_p (insn))
c51d95ec 2142 return purged;
5f0d2358 2143
5a1a3e5e
JH
2144 /* Branch probability/prediction notes are defined only for
2145 condjumps. We've possibly turned condjump into simplejump. */
2146 if (simplejump_p (insn))
2147 {
2148 note = find_reg_note (insn, REG_BR_PROB, NULL);
2149 if (note)
2150 remove_note (insn, note);
2151 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2152 remove_note (insn, note);
2153 }
2154
628f6a4e 2155 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 2156 {
7fcd7218
JH
 2157 /* Keep abnormal flags from leaking out of computed jumps turned
 2158 into simplejumps. */
f87c27b4 2159
0e1638d4 2160 e->flags &= ~EDGE_ABNORMAL;
7fcd7218 2161
5a566bed
MM
2162 /* See if this edge is one we should keep. */
2163 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2164 /* A conditional jump can fall through into the next
2165 block, so we should keep the edge. */
628f6a4e
BE
2166 {
2167 ei_next (&ei);
2168 continue;
2169 }
5f0d2358 2170 else if (e->dest != EXIT_BLOCK_PTR
a813c111 2171 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
5a566bed
MM
2172 /* If the destination block is the target of the jump,
2173 keep the edge. */
628f6a4e
BE
2174 {
2175 ei_next (&ei);
2176 continue;
2177 }
5a566bed
MM
2178 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2179 /* If the destination block is the exit block, and this
2180 instruction is a return, then keep the edge. */
628f6a4e
BE
2181 {
2182 ei_next (&ei);
2183 continue;
2184 }
5a566bed
MM
2185 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2186 /* Keep the edges that correspond to exceptions thrown by
0b75beaa
EB
2187 this instruction and rematerialize the EDGE_ABNORMAL
2188 flag we just cleared above. */
2189 {
2190 e->flags |= EDGE_ABNORMAL;
628f6a4e 2191 ei_next (&ei);
0b75beaa
EB
2192 continue;
2193 }
5f0d2358 2194
5a566bed 2195 /* We do not need this edge. */
c51d95ec 2196 bb->flags |= BB_DIRTY;
ca6c03ca
JH
2197 purged = true;
2198 remove_edge (e);
2199 }
5f0d2358 2200
628f6a4e 2201 if (EDGE_COUNT (bb->succs) == 0 || !purged)
c51d95ec 2202 return purged;
5f0d2358 2203
c263766c
RH
2204 if (dump_file)
2205 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5f0d2358 2206
ca6c03ca
JH
2207 if (!optimize)
2208 return purged;
2209
2210 /* Redistribute probabilities. */
c5cbcccf 2211 if (single_succ_p (bb))
ca6c03ca 2212 {
c5cbcccf
ZD
2213 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2214 single_succ_edge (bb)->count = bb->count;
f87c27b4 2215 }
ca6c03ca
JH
2216 else
2217 {
2218 note = find_reg_note (insn, REG_BR_PROB, NULL);
2219 if (!note)
2220 return purged;
5f0d2358 2221
ca6c03ca
JH
2222 b = BRANCH_EDGE (bb);
2223 f = FALLTHRU_EDGE (bb);
2224 b->probability = INTVAL (XEXP (note, 0));
2225 f->probability = REG_BR_PROB_BASE - b->probability;
2226 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2227 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2228 }
5f0d2358 2229
ca6c03ca
JH
2230 return purged;
2231 }
4b4bf941 2232 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
1722c2c8
RH
2233 {
2234 /* First, there should not be any EH or ABCALL edges resulting
2235 from non-local gotos and the like. If there were, we shouldn't
2236 have created the sibcall in the first place. Second, there
2237 should of course never have been a fallthru edge. */
c5cbcccf
ZD
2238 gcc_assert (single_succ_p (bb));
2239 gcc_assert (single_succ_edge (bb)->flags
2240 == (EDGE_SIBCALL | EDGE_ABNORMAL));
1722c2c8
RH
2241
2242 return 0;
2243 }
ca6c03ca 2244
ca6c03ca
JH
2245 /* If we don't see a jump insn, we don't know exactly why the block would
2246 have been broken at this point. Look for a simple, non-fallthru edge,
2247 as these are only created by conditional branches. If we find such an
2248 edge we know that there used to be a jump here and can then safely
2249 remove all non-fallthru edges. */
628f6a4e
BE
2250 found = false;
2251 FOR_EACH_EDGE (e, ei, bb->succs)
2252 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2253 {
2254 found = true;
2255 break;
2256 }
5f0d2358 2257
628f6a4e 2258 if (!found)
ca6c03ca 2259 return purged;
5f0d2358 2260
2afa8dce
DB
2261 /* Remove all but the fake and fallthru edges. The fake edge may be
2262 the only successor for this block in the case of noreturn
2263 calls. */
628f6a4e 2264 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 2265 {
2afa8dce 2266 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
c51d95ec
JH
2267 {
2268 bb->flags |= BB_DIRTY;
2269 remove_edge (e);
2270 purged = true;
2271 }
628f6a4e
BE
2272 else
2273 ei_next (&ei);
ca6c03ca 2274 }
5f0d2358 2275
c5cbcccf 2276 gcc_assert (single_succ_p (bb));
5f0d2358 2277
c5cbcccf
ZD
2278 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2279 single_succ_edge (bb)->count = bb->count;
ca6c03ca 2280
c263766c
RH
2281 if (dump_file)
2282 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
0b17ab2f 2283 bb->index);
ca6c03ca
JH
2284 return purged;
2285}
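
/* Hedged usage sketch (compiled out; illustration only): the typical
   caller pattern after an insn simplification, e.g. when a conditional
   jump has just been folded into an unconditional one.  The function
   name is hypothetical.  */
#if 0
static bool
after_jump_simplification_sketch (basic_block bb)
{
  /* ... BB_END (bb) was rewritten into a simplejump above ...  */

  /* Drop the successor edges that can no longer be taken; the return
     value says whether anything was removed.  */
  return purge_dead_edges (bb);
}
#endif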
2286
5f0d2358
RK
2287/* Search all basic blocks for potentially dead edges and purge them. Return
2288 true if some edge has been eliminated. */
ca6c03ca
JH
2289
2290bool
25cd19de 2291purge_all_dead_edges (void)
ca6c03ca 2292{
e0082a72 2293 int purged = false;
e0082a72 2294 basic_block bb;
473fb060 2295
e0082a72 2296 FOR_EACH_BB (bb)
473fb060 2297 {
e0082a72 2298 bool purged_here = purge_dead_edges (bb);
5f0d2358 2299
473fb060 2300 purged |= purged_here;
473fb060 2301 }
5f0d2358 2302
ca6c03ca
JH
2303 return purged;
2304}
9ee634e3
JH
2305
2306/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
2307
2308static basic_block
d329e058 2309cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3
JH
2310{
2311 rtx insn = insnp;
f470c378 2312 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 2313
370369e1
JH
2314 new_bb->il.rtl->footer = bb->il.rtl->footer;
2315 bb->il.rtl->footer = NULL;
9ee634e3 2316
f470c378 2317 return new_bb;
9ee634e3
JH
2318}
2319
2320
 2321/* Redirect edge E to DEST. */
6de9cd9a 2322static edge
d329e058 2323cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
9ee634e3
JH
2324{
2325 basic_block src = e->src;
6de9cd9a 2326 edge ret;
9ee634e3 2327
bc35512f 2328 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 2329 return NULL;
bc35512f 2330
3348b696 2331 if (e->dest == dest)
6de9cd9a 2332 return e;
bc35512f 2333
3348b696 2334 if (e->src != ENTRY_BLOCK_PTR
6de9cd9a 2335 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
f345f21a
JH
2336 {
2337 src->flags |= BB_DIRTY;
6de9cd9a 2338 return ret;
f345f21a 2339 }
bc35512f
JH
2340
2341 if (e->src == ENTRY_BLOCK_PTR
2342 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2343 {
c263766c
RH
2344 if (dump_file)
2345 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
bc35512f
JH
2346 e->src->index, dest->index);
2347
f345f21a 2348 e->src->flags |= BB_DIRTY;
bc35512f 2349 redirect_edge_succ (e, dest);
6de9cd9a 2350 return e;
bc35512f
JH
2351 }
2352
9ee634e3
JH
2353 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2354 in the case the basic block appears to be in sequence. Avoid this
2355 transformation. */
2356
9ee634e3
JH
2357 if (e->flags & EDGE_FALLTHRU)
2358 {
2359 /* Redirect any branch edges unified with the fallthru one. */
4b4bf941 2360 if (JUMP_P (BB_END (src))
432f982f
JH
2361 && label_is_jump_target_p (BB_HEAD (e->dest),
2362 BB_END (src)))
9ee634e3 2363 {
341c100f 2364 edge redirected;
c22cacf3 2365
c263766c
RH
2366 if (dump_file)
2367 fprintf (dump_file, "Fallthru edge unified with branch "
432f982f
JH
2368 "%i->%i redirected to %i\n",
2369 e->src->index, e->dest->index, dest->index);
2370 e->flags &= ~EDGE_FALLTHRU;
341c100f
NS
2371 redirected = redirect_branch_edge (e, dest);
2372 gcc_assert (redirected);
432f982f 2373 e->flags |= EDGE_FALLTHRU;
c22cacf3 2374 e->src->flags |= BB_DIRTY;
6de9cd9a 2375 return e;
9ee634e3
JH
2376 }
2377 /* In case we are redirecting fallthru edge to the branch edge
c22cacf3 2378 of conditional jump, remove it. */
628f6a4e 2379 if (EDGE_COUNT (src->succs) == 2)
9ee634e3 2380 {
03101c6f
KH
2381 /* Find the edge that is different from E. */
2382 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
628f6a4e 2383
9ee634e3 2384 if (s->dest == dest
a813c111
SB
2385 && any_condjump_p (BB_END (src))
2386 && onlyjump_p (BB_END (src)))
2387 delete_insn (BB_END (src));
9ee634e3 2388 }
6de9cd9a 2389 ret = redirect_edge_succ_nodup (e, dest);
c263766c
RH
2390 if (dump_file)
2391 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
bc35512f 2392 e->src->index, e->dest->index, dest->index);
9ee634e3
JH
2393 }
2394 else
bc35512f 2395 ret = redirect_branch_edge (e, dest);
9ee634e3
JH
2396
2397 /* We don't want simplejumps in the insn stream during cfglayout. */
341c100f 2398 gcc_assert (!simplejump_p (BB_END (src)));
9ee634e3 2399
f345f21a 2400 src->flags |= BB_DIRTY;
9ee634e3
JH
2401 return ret;
2402}
2403
2404/* Simple wrapper as we always can redirect fallthru edges. */
2405static basic_block
d329e058 2406cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 2407{
341c100f
NS
2408 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2409
2410 gcc_assert (redirected);
9ee634e3
JH
2411 return NULL;
2412}
2413
f470c378
ZD
2414/* Same as delete_basic_block but update cfg_layout structures. */
2415
9ee634e3 2416static void
d329e058 2417cfg_layout_delete_block (basic_block bb)
9ee634e3 2418{
a813c111 2419 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
9ee634e3 2420
370369e1 2421 if (bb->il.rtl->header)
9ee634e3 2422 {
a813c111 2423 next = BB_HEAD (bb);
9ee634e3 2424 if (prev)
370369e1 2425 NEXT_INSN (prev) = bb->il.rtl->header;
9ee634e3 2426 else
370369e1
JH
2427 set_first_insn (bb->il.rtl->header);
2428 PREV_INSN (bb->il.rtl->header) = prev;
2429 insn = bb->il.rtl->header;
9ee634e3
JH
2430 while (NEXT_INSN (insn))
2431 insn = NEXT_INSN (insn);
2432 NEXT_INSN (insn) = next;
2433 PREV_INSN (next) = insn;
2434 }
a813c111 2435 next = NEXT_INSN (BB_END (bb));
370369e1 2436 if (bb->il.rtl->footer)
9ee634e3 2437 {
370369e1 2438 insn = bb->il.rtl->footer;
bc35512f
JH
2439 while (insn)
2440 {
4b4bf941 2441 if (BARRIER_P (insn))
bc35512f
JH
2442 {
2443 if (PREV_INSN (insn))
2444 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2445 else
370369e1 2446 bb->il.rtl->footer = NEXT_INSN (insn);
bc35512f
JH
2447 if (NEXT_INSN (insn))
2448 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2449 }
4b4bf941 2450 if (LABEL_P (insn))
bc35512f
JH
2451 break;
2452 insn = NEXT_INSN (insn);
2453 }
370369e1 2454 if (bb->il.rtl->footer)
bc35512f 2455 {
a813c111 2456 insn = BB_END (bb);
370369e1
JH
2457 NEXT_INSN (insn) = bb->il.rtl->footer;
2458 PREV_INSN (bb->il.rtl->footer) = insn;
bc35512f
JH
2459 while (NEXT_INSN (insn))
2460 insn = NEXT_INSN (insn);
2461 NEXT_INSN (insn) = next;
2462 if (next)
2463 PREV_INSN (next) = insn;
2464 else
2465 set_last_insn (insn);
2466 }
9ee634e3
JH
2467 }
2468 if (bb->next_bb != EXIT_BLOCK_PTR)
370369e1 2469 to = &bb->next_bb->il.rtl->header;
9ee634e3
JH
2470 else
2471 to = &cfg_layout_function_footer;
997de8ed 2472
9ee634e3
JH
2473 rtl_delete_block (bb);
2474
2475 if (prev)
2476 prev = NEXT_INSN (prev);
d329e058 2477 else
9ee634e3
JH
2478 prev = get_insns ();
2479 if (next)
2480 next = PREV_INSN (next);
d329e058 2481 else
9ee634e3
JH
2482 next = get_last_insn ();
2483
2484 if (next && NEXT_INSN (next) != prev)
2485 {
2486 remaints = unlink_insn_chain (prev, next);
2487 insn = remaints;
2488 while (NEXT_INSN (insn))
2489 insn = NEXT_INSN (insn);
2490 NEXT_INSN (insn) = *to;
2491 if (*to)
2492 PREV_INSN (*to) = insn;
2493 *to = remaints;
2494 }
2495}
2496
beb235f8 2497/* Return true when blocks A and B can be safely merged. */
bc35512f
JH
2498static bool
2499cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2500{
750054a2
CT
2501 /* If we are partitioning hot/cold basic blocks, we don't want to
2502 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
2503 and cold sections.
2504
8e8d5162 2505 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
2506 be optimizable (or blocks that appear to be mergeable), but which really
2507 must be left untouched (they are required to make it safely across
2508 partition boundaries). See the comments at the top of
8e8d5162
CT
2509 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2510
87c8b4be 2511 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 2512 return false;
750054a2 2513
bc35512f 2514 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
2515 return (single_succ_p (a)
2516 && single_succ (a) == b
2517 && single_pred_p (b) == 1
628f6a4e 2518 && a != b
bc35512f 2519 /* Must be simple edge. */
c5cbcccf 2520 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
bc35512f
JH
2521 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2522 /* If the jump insn has side effects,
2523 we can't kill the edge. */
4b4bf941 2524 && (!JUMP_P (BB_END (a))
e24e7211 2525 || (reload_completed
a813c111 2526 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
2527}
2528
41806d92
NS
2529/* Merge block A and B. The blocks must be mergeable. */
2530
bc35512f
JH
2531static void
2532cfg_layout_merge_blocks (basic_block a, basic_block b)
2533{
2534#ifdef ENABLE_CHECKING
341c100f 2535 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
bc35512f
JH
2536#endif
2537
2538 /* If there was a CODE_LABEL beginning B, delete it. */
4b4bf941 2539 if (LABEL_P (BB_HEAD (b)))
2c97f8e4
RH
2540 {
2541 /* This might have been an EH label that no longer has incoming
2542 EH edges. Update data structures to match. */
2543 maybe_remove_eh_handler (BB_HEAD (b));
c22cacf3 2544
2c97f8e4
RH
2545 delete_insn (BB_HEAD (b));
2546 }
bc35512f
JH
2547
2548 /* We should have fallthru edge in a, or we can do dummy redirection to get
2549 it cleaned up. */
4b4bf941 2550 if (JUMP_P (BB_END (a)))
628f6a4e 2551 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
341c100f 2552 gcc_assert (!JUMP_P (BB_END (a)));
bc35512f
JH
2553
2554 /* Possible line number notes should appear in between. */
370369e1 2555 if (b->il.rtl->header)
bc35512f 2556 {
a813c111 2557 rtx first = BB_END (a), last;
bc35512f 2558
370369e1 2559 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
bc35512f 2560 delete_insn_chain (NEXT_INSN (first), last);
370369e1 2561 b->il.rtl->header = NULL;
bc35512f
JH
2562 }
2563
2564 /* In the case basic blocks are not adjacent, move them around. */
a813c111 2565 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
bc35512f 2566 {
a813c111 2567 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
bc35512f 2568
a7102479 2569 emit_insn_after_noloc (first, BB_END (a));
bc35512f
JH
2570 /* Skip possible DELETED_LABEL insn. */
2571 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2572 first = NEXT_INSN (first);
341c100f 2573 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
a813c111 2574 BB_HEAD (b) = NULL;
bc35512f
JH
2575 delete_insn (first);
2576 }
2577 /* Otherwise just re-associate the instructions. */
2578 else
2579 {
2580 rtx insn;
2581
a813c111
SB
2582 for (insn = BB_HEAD (b);
2583 insn != NEXT_INSN (BB_END (b));
2584 insn = NEXT_INSN (insn))
bc35512f 2585 set_block_for_insn (insn, a);
a813c111 2586 insn = BB_HEAD (b);
bc35512f
JH
2587 /* Skip possible DELETED_LABEL insn. */
2588 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2589 insn = NEXT_INSN (insn);
341c100f 2590 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
a813c111
SB
2591 BB_HEAD (b) = NULL;
2592 BB_END (a) = BB_END (b);
bc35512f
JH
2593 delete_insn (insn);
2594 }
2595
2596 /* Possible tablejumps and barriers should appear after the block. */
370369e1 2597 if (b->il.rtl->footer)
bc35512f 2598 {
370369e1
JH
2599 if (!a->il.rtl->footer)
2600 a->il.rtl->footer = b->il.rtl->footer;
bc35512f
JH
2601 else
2602 {
370369e1 2603 rtx last = a->il.rtl->footer;
bc35512f
JH
2604
2605 while (NEXT_INSN (last))
2606 last = NEXT_INSN (last);
370369e1
JH
2607 NEXT_INSN (last) = b->il.rtl->footer;
2608 PREV_INSN (b->il.rtl->footer) = last;
bc35512f 2609 }
370369e1 2610 b->il.rtl->footer = NULL;
bc35512f 2611 }
5e2d947c 2612 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
bc35512f 2613
c263766c
RH
2614 if (dump_file)
2615 fprintf (dump_file, "Merged blocks %d and %d.\n",
bc35512f 2616 a->index, b->index);
bc35512f
JH
2617}
2618
2619/* Split edge E. */
f470c378 2620
bc35512f
JH
2621static basic_block
2622cfg_layout_split_edge (edge e)
2623{
bc35512f
JH
2624 basic_block new_bb =
2625 create_basic_block (e->src != ENTRY_BLOCK_PTR
a813c111 2626 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
2627 NULL_RTX, e->src);
2628
af874237
JW
2629 /* ??? This info is likely going to be out of date very soon, but we must
2630 create it to avoid getting an ICE later. */
5e2d947c 2631 if (e->dest->il.rtl->global_live_at_start)
af874237 2632 {
5e2d947c
JH
2633 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
2634 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
2635 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
2636 e->dest->il.rtl->global_live_at_start);
2637 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
2638 e->dest->il.rtl->global_live_at_start);
af874237
JW
2639 }
2640
a9b2ee88 2641 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
2642 redirect_edge_and_branch_force (e, new_bb);
2643
2644 return new_bb;
2645}
2646
f470c378
ZD
2647/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2648
2649static void
2650rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2651{
2652}
2653
6de9cd9a
DN
2654/* Return 1 if BB ends with a call, possibly followed by some
2655 instructions that must stay with the call, 0 otherwise. */
2656
2657static bool
2658rtl_block_ends_with_call_p (basic_block bb)
2659{
2660 rtx insn = BB_END (bb);
2661
4b4bf941 2662 while (!CALL_P (insn)
6de9cd9a
DN
2663 && insn != BB_HEAD (bb)
2664 && keep_with_call_p (insn))
2665 insn = PREV_INSN (insn);
4b4bf941 2666 return (CALL_P (insn));
6de9cd9a
DN
2667}
2668
2669/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2670
2671static bool
2672rtl_block_ends_with_condjump_p (basic_block bb)
2673{
2674 return any_condjump_p (BB_END (bb));
2675}
2676
2677/* Return true if we need to add fake edge to exit.
2678 Helper function for rtl_flow_call_edges_add. */
2679
2680static bool
2681need_fake_edge_p (rtx insn)
2682{
2683 if (!INSN_P (insn))
2684 return false;
2685
4b4bf941 2686 if ((CALL_P (insn)
6de9cd9a
DN
2687 && !SIBLING_CALL_P (insn)
2688 && !find_reg_note (insn, REG_NORETURN, NULL)
6de9cd9a
DN
2689 && !CONST_OR_PURE_CALL_P (insn)))
2690 return true;
2691
2692 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2693 && MEM_VOLATILE_P (PATTERN (insn)))
2694 || (GET_CODE (PATTERN (insn)) == PARALLEL
2695 && asm_noperands (insn) != -1
2696 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2697 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2698}
2699
 2700/* Add fake edges to the function exit for any non-constant and non-noreturn
2701 calls, volatile inline assembly in the bitmap of blocks specified by
2702 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
2703 that were split.
2704
2705 The goal is to expose cases in which entering a basic block does not imply
2706 that all subsequent instructions must be executed. */
2707
2708static int
2709rtl_flow_call_edges_add (sbitmap blocks)
2710{
2711 int i;
2712 int blocks_split = 0;
2713 int last_bb = last_basic_block;
2714 bool check_last_block = false;
2715
24bd1a0b 2716 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
2717 return 0;
2718
2719 if (! blocks)
2720 check_last_block = true;
2721 else
2722 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2723
2724 /* In the last basic block, before epilogue generation, there will be
2725 a fallthru edge to EXIT. Special care is required if the last insn
2726 of the last basic block is a call because make_edge folds duplicate
2727 edges, which would result in the fallthru edge also being marked
2728 fake, which would result in the fallthru edge being removed by
2729 remove_fake_edges, which would result in an invalid CFG.
2730
2731 Moreover, we can't elide the outgoing fake edge, since the block
2732 profiler needs to take this into account in order to solve the minimal
2733 spanning tree in the case that the call doesn't return.
2734
2735 Handle this by adding a dummy instruction in a new last basic block. */
2736 if (check_last_block)
2737 {
2738 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2739 rtx insn = BB_END (bb);
2740
2741 /* Back up past insns that must be kept in the same block as a call. */
2742 while (insn != BB_HEAD (bb)
2743 && keep_with_call_p (insn))
2744 insn = PREV_INSN (insn);
2745
2746 if (need_fake_edge_p (insn))
2747 {
2748 edge e;
2749
9ff3d2de
JL
2750 e = find_edge (bb, EXIT_BLOCK_PTR);
2751 if (e)
2752 {
2753 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2754 commit_edge_insertions ();
2755 }
6de9cd9a
DN
2756 }
2757 }
2758
2759 /* Now add fake edges to the function exit for any non constant
2760 calls since there is no way that we can determine if they will
2761 return or not... */
2762
24bd1a0b 2763 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
6de9cd9a
DN
2764 {
2765 basic_block bb = BASIC_BLOCK (i);
2766 rtx insn;
2767 rtx prev_insn;
2768
2769 if (!bb)
2770 continue;
2771
2772 if (blocks && !TEST_BIT (blocks, i))
2773 continue;
2774
2775 for (insn = BB_END (bb); ; insn = prev_insn)
2776 {
2777 prev_insn = PREV_INSN (insn);
2778 if (need_fake_edge_p (insn))
2779 {
2780 edge e;
2781 rtx split_at_insn = insn;
2782
2783 /* Don't split the block between a call and an insn that should
c22cacf3 2784 remain in the same block as the call. */
4b4bf941 2785 if (CALL_P (insn))
6de9cd9a
DN
2786 while (split_at_insn != BB_END (bb)
2787 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2788 split_at_insn = NEXT_INSN (split_at_insn);
2789
2790 /* The handling above of the final block before the epilogue
c22cacf3 2791 should be enough to verify that there is no edge to the exit
6de9cd9a
DN
2792 block in CFG already. Calling make_edge in such case would
2793 cause us to mark that edge as fake and remove it later. */
2794
2795#ifdef ENABLE_CHECKING
2796 if (split_at_insn == BB_END (bb))
628f6a4e 2797 {
9ff3d2de
JL
2798 e = find_edge (bb, EXIT_BLOCK_PTR);
2799 gcc_assert (e == NULL);
628f6a4e 2800 }
6de9cd9a
DN
2801#endif
2802
2803 /* Note that the following may create a new basic block
2804 and renumber the existing basic blocks. */
2805 if (split_at_insn != BB_END (bb))
2806 {
2807 e = split_block (bb, split_at_insn);
2808 if (e)
2809 blocks_split++;
2810 }
2811
2812 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2813 }
2814
2815 if (insn == BB_HEAD (bb))
2816 break;
2817 }
2818 }
2819
2820 if (blocks_split)
2821 verify_flow_info ();
2822
2823 return blocks_split;
2824}
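
/* Hedged usage sketch (compiled out): the profiler reaches this routine
   through the flow_call_edges_add hook so that blocks ending in a call
   that may not return still have an outgoing edge to the exit block.
   Passing a NULL sbitmap processes the whole CFG, as documented above.
   The wrapper name is hypothetical.  */
#if 0
static void
add_fake_exit_edges_sketch (void)
{
  int blocks_split = rtl_flow_call_edges_add (NULL);

  if (dump_file && blocks_split)
    fprintf (dump_file, "%d blocks split for fake exit edges\n",
	     blocks_split);
}
#endif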
2825
1cb7dfc3 2826/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
315682fb 2827 the conditional branch target, SECOND_HEAD would be the fall-thru;
1cb7dfc3
MH
 2828 there is no need to handle the fall-thru here, because the loop
 2829 versioning code handles it.  SECOND_HEAD is only present because the
 2830 tree-level hook needs it, and both hooks must share the same signature. */
2831static void
2832rtl_lv_add_condition_to_bb (basic_block first_head ,
c22cacf3
MS
2833 basic_block second_head ATTRIBUTE_UNUSED,
2834 basic_block cond_bb, void *comp_rtx)
1cb7dfc3
MH
2835{
2836 rtx label, seq, jump;
2837 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2838 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2839 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2840 enum machine_mode mode;
2841
2842
2843 label = block_label (first_head);
2844 mode = GET_MODE (op0);
2845 if (mode == VOIDmode)
2846 mode = GET_MODE (op1);
2847
2848 start_sequence ();
2849 op0 = force_operand (op0, NULL_RTX);
2850 op1 = force_operand (op1, NULL_RTX);
2851 do_compare_rtx_and_jump (op0, op1, comp, 0,
2852 mode, NULL_RTX, NULL_RTX, label);
2853 jump = get_last_insn ();
2854 JUMP_LABEL (jump) = label;
2855 LABEL_NUSES (label)++;
2856 seq = get_insns ();
2857 end_sequence ();
2858
 2859 /* Add the new condition at the end of the new head block.  */
 2860 emit_insn_after (seq, BB_END (cond_bb));
2861}
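
/* Hedged sketch of a call site (compiled out; hypothetical): the loop
   versioning code passes a comparison RTX whose operands and code are
   picked apart above with XEXP and GET_CODE.  gen_rtx_NE and const0_rtx
   are assumed from the standard RTL constructors; the wrapper name is
   made up.  */
#if 0
static void
guard_with_nonzero_test_sketch (basic_block then_head, basic_block else_head,
				basic_block cond_bb, rtx reg)
{
  rtx cond = gen_rtx_NE (VOIDmode, reg, const0_rtx);

  /* Emits "if (reg != 0) goto <then_head>;" at the end of COND_BB.  */
  rtl_lv_add_condition_to_bb (then_head, else_head, cond_bb, cond);
}
#endif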
2862
2863
 2864/* Given a block B with a conditional branch at its end, store the
 2865 branch edge and the fall-thru edge in *BRANCH_EDGE and *FALLTHRU_EDGE
 2866 respectively. */
2867static void
2868rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2869 edge *fallthru_edge)
2870{
2871 edge e = EDGE_SUCC (b, 0);
2872
2873 if (e->flags & EDGE_FALLTHRU)
2874 {
2875 *fallthru_edge = e;
2876 *branch_edge = EDGE_SUCC (b, 1);
2877 }
2878 else
2879 {
2880 *branch_edge = e;
2881 *fallthru_edge = EDGE_SUCC (b, 1);
2882 }
2883}
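
/* Hedged usage sketch (compiled out): callers use the extract_cond_bb_edges
   hook to learn which successor of a two-way block is the branch and which
   is the fall-thru before redirecting either of them.  The function name
   is hypothetical.  */
#if 0
static edge
taken_edge_of_sketch (basic_block b)
{
  edge branch_edge, fallthru_edge;

  rtl_extract_cond_bb_edges (b, &branch_edge, &fallthru_edge);
  return branch_edge;
}
#endif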
2884
5e2d947c
JH
2885void
2886init_rtl_bb_info (basic_block bb)
2887{
2888 gcc_assert (!bb->il.rtl);
2889 bb->il.rtl = ggc_alloc_cleared (sizeof (struct rtl_bb_info));
2890}
2891
1cb7dfc3 2892
8cd37d0b
RL
 2893/* Add the instruction pattern PAT to the end of basic block BB. */
2894
2895rtx
2896insert_insn_end_bb_new (rtx pat, basic_block bb)
2897{
2898 rtx insn = BB_END (bb);
2899 rtx new_insn;
2900 rtx pat_end = pat;
2901
2902 while (NEXT_INSN (pat_end) != NULL_RTX)
2903 pat_end = NEXT_INSN (pat_end);
2904
 2905 /* If the last insn is a jump, insert PAT in front of it [taking care to
 2906 handle cc0, etc. properly].  Similarly we need to take care of trapping
 2907 instructions in the presence of non-call exceptions. */
2908
2909 if (JUMP_P (insn)
2910 || (NONJUMP_INSN_P (insn)
2911 && (!single_succ_p (bb)
2912 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2913 {
2914#ifdef HAVE_cc0
2915 rtx note;
2916#endif
2917 /* If this is a jump table, then we can't insert stuff here. Since
2918 we know the previous real insn must be the tablejump, we insert
2919 the new instruction just before the tablejump. */
2920 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2921 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2922 insn = prev_real_insn (insn);
2923
2924#ifdef HAVE_cc0
2925 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2926 if cc0 isn't set. */
2927 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2928 if (note)
2929 insn = XEXP (note, 0);
2930 else
2931 {
2932 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2933 if (maybe_cc0_setter
2934 && INSN_P (maybe_cc0_setter)
2935 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2936 insn = maybe_cc0_setter;
2937 }
2938#endif
2939 /* FIXME: What if something in cc0/jump uses value set in new
2940 insn? */
2941 new_insn = emit_insn_before_noloc (pat, insn);
2942 }
2943
2944 /* Likewise if the last insn is a call, as will happen in the presence
2945 of exception handling. */
2946 else if (CALL_P (insn)
2947 && (!single_succ_p (bb)
2948 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
2949 {
2950 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
2951 we search backward and place the instructions before the first
2952 parameter is loaded. Do this for everyone for consistency and a
2953 presumption that we'll get better code elsewhere as well. */
2954
2955 /* Since different machines initialize their parameter registers
2956 in different orders, assume nothing. Collect the set of all
2957 parameter registers. */
2958 insn = find_first_parameter_load (insn, BB_HEAD (bb));
2959
2960 /* If we found all the parameter loads, then we want to insert
2961 before the first parameter load.
2962
2963 If we did not find all the parameter loads, then we might have
2964 stopped on the head of the block, which could be a CODE_LABEL.
2965 If we inserted before the CODE_LABEL, then we would be putting
2966 the insn in the wrong basic block. In that case, put the insn
2967 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
2968 while (LABEL_P (insn)
2969 || NOTE_INSN_BASIC_BLOCK_P (insn))
2970 insn = NEXT_INSN (insn);
2971
2972 new_insn = emit_insn_before_noloc (pat, insn);
2973 }
2974 else
2975 new_insn = emit_insn_after_noloc (pat, insn);
2976
2977 return new_insn;
2978}
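
/* Hedged usage sketch (compiled out): emitting a simple register copy at
   the end of BB while letting the routine above handle the jump, call and
   cc0 placement rules.  gen_move_insn is assumed from expr.h (already
   included); the wrapper name is hypothetical.  */
#if 0
static rtx
append_reg_copy_sketch (basic_block bb, rtx dest, rtx src)
{
  rtx pat = gen_move_insn (dest, src);

  return insert_insn_end_bb_new (pat, bb);
}
#endif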
2979
9ee634e3
JH
2980/* Implementation of CFG manipulation for linearized RTL. */
2981struct cfg_hooks rtl_cfg_hooks = {
f470c378 2982 "rtl",
9ee634e3 2983 rtl_verify_flow_info,
10e9fecc 2984 rtl_dump_bb,
bc35512f 2985 rtl_create_basic_block,
9ee634e3
JH
2986 rtl_redirect_edge_and_branch,
2987 rtl_redirect_edge_and_branch_force,
2988 rtl_delete_block,
2989 rtl_split_block,
f470c378 2990 rtl_move_block_after,
bc35512f
JH
2991 rtl_can_merge_blocks, /* can_merge_blocks_p */
2992 rtl_merge_blocks,
6de9cd9a
DN
2993 rtl_predict_edge,
2994 rtl_predicted_by_p,
2995 NULL, /* can_duplicate_block_p */
2996 NULL, /* duplicate_block */
f470c378
ZD
2997 rtl_split_edge,
2998 rtl_make_forwarder_block,
6de9cd9a
DN
2999 rtl_tidy_fallthru_edge,
3000 rtl_block_ends_with_call_p,
3001 rtl_block_ends_with_condjump_p,
d9d4706f
KH
3002 rtl_flow_call_edges_add,
3003 NULL, /* execute_on_growing_pred */
1cb7dfc3
MH
3004 NULL, /* execute_on_shrinking_pred */
3005 NULL, /* duplicate loop for trees */
3006 NULL, /* lv_add_condition_to_bb */
3007 NULL, /* lv_adjust_loop_header_phi*/
3008 NULL, /* extract_cond_bb_edges */
c22cacf3 3009 NULL /* flush_pending_stmts */
9ee634e3
JH
3010};
3011
3012/* Implementation of CFG manipulation for cfg layout RTL, where
 3013 basic blocks connected via fallthru edges do not have to be adjacent.
 3014 This representation will hopefully become the default one in a future
 3015 version of the compiler. */
6de9cd9a
DN
3016
3017/* We do not want to declare these functions in a header file, since they
3018 should only be used through the cfghooks interface, and we do not want to
3019 move them here since it would require also moving quite a lot of related
3020 code. */
3021extern bool cfg_layout_can_duplicate_bb_p (basic_block);
3022extern basic_block cfg_layout_duplicate_bb (basic_block);
3023
9ee634e3 3024struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
f470c378 3025 "cfglayout mode",
bc35512f 3026 rtl_verify_flow_info_1,
10e9fecc 3027 rtl_dump_bb,
bc35512f 3028 cfg_layout_create_basic_block,
9ee634e3
JH
3029 cfg_layout_redirect_edge_and_branch,
3030 cfg_layout_redirect_edge_and_branch_force,
3031 cfg_layout_delete_block,
3032 cfg_layout_split_block,
f470c378 3033 rtl_move_block_after,
bc35512f
JH
3034 cfg_layout_can_merge_blocks_p,
3035 cfg_layout_merge_blocks,
6de9cd9a
DN
3036 rtl_predict_edge,
3037 rtl_predicted_by_p,
3038 cfg_layout_can_duplicate_bb_p,
3039 cfg_layout_duplicate_bb,
f470c378
ZD
3040 cfg_layout_split_edge,
3041 rtl_make_forwarder_block,
6de9cd9a
DN
3042 NULL,
3043 rtl_block_ends_with_call_p,
3044 rtl_block_ends_with_condjump_p,
d9d4706f
KH
3045 rtl_flow_call_edges_add,
3046 NULL, /* execute_on_growing_pred */
1cb7dfc3
MH
3047 NULL, /* execute_on_shrinking_pred */
3048 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3049 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3050 NULL, /* lv_adjust_loop_header_phi*/
3051 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
c22cacf3 3052 NULL /* flush_pending_stmts */
9ee634e3 3053};
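
/* Hedged note with a sketch (compiled out): passes never index these tables
   themselves; the active table is installed through helpers in cfghooks.c
   and cfglayout.c.  The names used below (cfg_layout_initialize,
   cfg_layout_finalize, and the underlying rtl_register_cfg_hooks /
   cfg_layout_rtl_register_cfg_hooks) are assumptions taken from those
   files; the wrapper itself is hypothetical.  */
#if 0
static void
reorder_in_cfglayout_mode_sketch (void)
{
  cfg_layout_initialize (0);	/* switch to cfg_layout_rtl_cfg_hooks */
  /* ... move, duplicate or merge blocks here ...  */
  cfg_layout_finalize ();	/* relink the insn chain, back to rtl_cfg_hooks */
}
#endif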