/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
         fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "expr.h"
#include "target.h"
#include "common/common-target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx cfg_layout_function_footer;
static GTY(()) rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const_rtx);
static int can_delete_label_p (const_rtx);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const_rtx note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const_rtx label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !in_expr_list_p (forced_labels, label));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
        {
          const char *name = LABEL_NAME (insn);
          basic_block bb = BLOCK_FOR_INSN (insn);
          rtx bb_note = NEXT_INSN (insn);

          really_delete = false;
          PUT_CODE (insn, NOTE);
          NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (insn) = name;

          /* If the note following the label starts a basic block, and the
             label is a member of the same basic block, interchange the two.  */
          if (bb_note != NULL_RTX
              && NOTE_INSN_BASIC_BLOCK_P (bb_note)
              && bb != NULL
              && bb == BLOCK_FOR_INSN (bb_note))
            {
              reorder_insns_nobb (insn, insn, bb_note);
              BB_HEAD (bb) = bb_note;
              if (BB_END (bb) == bb_note)
                BB_END (bb) = insn;
            }
        }

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
          && LABEL_P (JUMP_LABEL (insn)))
        LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
              = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
             && LABEL_P (XEXP (note, 0)))
        {
          LABEL_NUSES (XEXP (note, 0))--;
          remove_note (insn, note);
        }
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
         && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
        {
          rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

          /* When deleting code in bulk (e.g. removing many unreachable
             blocks) we can delete a label that's a target of the vector
             before deleting the vector itself.  */
          if (!NOTE_P (label))
            LABEL_NUSES (label)--;
        }
    }
}

/* Like delete_insn but also purge dead edges from BB.  */

void
delete_insn_and_edges (rtx insn)
{
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
}
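
/* Illustrative usage sketch (added by the editor, not part of the original
   cfgrtl.c): a cleanup pass that drops a redundant jump at the end of a
   block would typically go through delete_insn_and_edges so that any edges
   made dead by the deletion are purged in the same step.  The helper name
   example_remove_trailing_jump is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_remove_trailing_jump (basic_block bb)
{
  rtx insn = BB_END (bb);

  /* Only a plain unconditional jump with no side effects is safe to drop
     this way; the caller is responsible for fixing up the fallthru edge.  */
  if (JUMP_P (insn) && onlyjump_p (insn) && any_uncondjump_p (insn))
    delete_insn_and_edges (insn);
}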

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx prev, current;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  current = finish;
  while (1)
    {
      prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (current))
        ;
      else
        delete_insn (current);

      if (clear_bb && !INSN_DELETED_P (current))
        set_block_for_insn (current, NULL);

      if (current == start)
        break;
      current = prev;
    }
}

/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow
   the BASIC_BLOCK chain and should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.  Both
   END and HEAD can be NULL to create a basic block at the end of the INSN
   chain.  AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (LABEL_P (head))
        after = head;
      else
        {
          after = PREV_INSN (head);
          head = bb_note;
        }

      if (after != bb_note && NEXT_INSN (after) != bb_note)
        reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
        head = end = bb_note
          = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
        {
          bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
          if (head == end)
            end = bb_note;
        }
      else
        {
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
          head = bb_note;
          if (!end)
            end = head;
        }

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD and
   END and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = (rtx) headp, end = (rtx) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block >= basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = BB_END (bb);
      rtx insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
        {
          BLOCK_FOR_INSN (insn) = bb;
          if (insn == end)
            break;
        }
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}

struct rtl_opt_pass pass_free_cfg =
{
 {
  RTL_PASS,
  "*free_cfg",                          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_pass_free_cfg,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};

/* Return RTX to emit after when we want to emit code on the entry of function.  */
rtx
entry_of_function (void)
{
  return (n_basic_blocks > NUM_FIXED_BLOCKS ?
          BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
{
  rtx insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}


/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const_rtx insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !single_succ_p (bb))
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (JUMP_P (insn) && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
        return false;
    }

  return true;
}
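
/* Illustrative usage sketch (added by the editor, not part of the original
   cfgrtl.c): counting forwarder blocks in the current function, e.g. to
   decide whether a jump-threading cleanup is worth running.  The helper name
   example_count_forwarder_blocks is hypothetical.  */

static int ATTRIBUTE_UNUSED
example_count_forwarder_blocks (void)
{
  basic_block bb;
  int count = 0;

  /* forwarder_block_p already skips loop latches, headers and preheaders.  */
  FOR_EACH_BB (bb)
    if (forwarder_block_p (bb))
      count++;
  return count;
}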

/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR)
    return true;
  if (src->next_bb != target)
    return 0;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR
        && e->flags & EDGE_FALLTHRU)
      return 0;

  insn2 = BB_HEAD (target);
  if (insn2 && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Return nonzero if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR)
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR
        && e->flags & EDGE_FALLTHRU)
      return 0;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx
bb_note (basic_block bb)
{
  rtx note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return note;
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx
first_insn_after_basic_block_note (basic_block block)
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
        {
          rtx next = insn;

          insn = PREV_INSN (insn);

          /* If the block contains only debug insns, insn would have
             been NULL in a non-debug compilation, and then we'd end
             up emitting a DELETED note.  For -fcompare-debug
             stability, emit the note too.  */
          if (insn != BB_END (bb)
              && DEBUG_INSN_P (next)
              && DEBUG_INSN_P (BB_END (bb)))
            {
              while (next != BB_END (bb) && DEBUG_INSN_P (next))
                next = NEXT_INSN (next);

              if (next == BB_END (bb))
                emit_note_after (NOTE_INSN_DELETED, next);
            }
        }
      else
        insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
        insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
          && INSN_LOCATION (insn) == goto_locus)
        return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
             a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
         in particular at the end of a function.  */
      if (b_head == b_end)
        b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
        b_empty = 1;
      if (! del_last)
        del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
        if (!NOTE_P (prev)
            || NOTE_INSN_BASIC_BLOCK_P (prev)
            || prev == BB_HEAD (a))
          break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
        {
          rtx tmp = prev;

          prev = prev_nonnote_insn (prev);
          if (!prev)
            prev = BB_HEAD (a);
          del_first = tmp;
        }
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
         and the debug insns that make up B after the debug insns,
         bringing the debug insns into A while keeping the notes after
         the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
        reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
                            b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}


/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
          && single_succ (a) == b
          && single_pred_p (b)
          && a != b
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
          && a->next_bb == b
          && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (!JUMP_P (BB_END (a))
              || (reload_completed
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return BB_HEAD (block);
}
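
/* Illustrative usage sketch (added by the editor, not part of the original
   cfgrtl.c): emitting an unconditional jump to a block, using block_label to
   guarantee the target starts with a CODE_LABEL and keeping the label's use
   count consistent.  The helper name example_emit_jump_to_block is
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_emit_jump_to_block (basic_block target, rtx after)
{
  rtx label = block_label (target);
  rtx jump = emit_jump_insn_after (gen_jump (label), after);

  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  return jump;
}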

/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
         edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
          && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
        fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
        {
          rtx insn = BB_FOOTER (src);

          delete_insn_chain (kill_from, BB_END (src), false);

          /* Remove barriers but keep jumptables.  */
          while (insn)
            {
              if (BARRIER_P (insn))
                {
                  if (PREV_INSN (insn))
                    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
                  else
                    BB_FOOTER (src) = NEXT_INSN (insn);
                  if (NEXT_INSN (insn))
                    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
                }
              if (LABEL_P (insn))
                break;
              insn = NEXT_INSN (insn);
            }
        }
      else
        delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
                           false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
        return NULL;
      if (dump_file)
        fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
                 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
        {
          gcc_assert (target == EXIT_BLOCK_PTR);
          return NULL;
        }
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
        fprintf (dump_file, "Replacing insn %i by jump %i\n",
                 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
         simple jump and remove its associated CODE_LABEL
         and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
        delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
        emit_barrier_after (BB_END (src));
      else
        {
          if (barrier != NEXT_INSN (BB_END (src)))
            {
              /* Move the jump before barrier so that the notes
                 which originally were or were created before jump table are
                 inside the basic block.  */
              rtx new_insn = BB_END (src);

              update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
                                        PREV_INSN (barrier), src);

              NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
              PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

              NEXT_INSN (new_insn) = barrier;
              NEXT_INSN (PREV_INSN (barrier)) = new_insn;

              PREV_INSN (new_insn) = PREV_INSN (barrier);
              PREV_INSN (barrier) = new_insn;
            }
        }
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR)
        return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
        vec = XVEC (PATTERN (tmp), 0);
      else
        vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
        if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
          {
            RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
            --LABEL_NUSES (old_label);
            ++LABEL_NUSES (new_label);
          }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
          && SET_DEST (tmp) == pc_rtx
          && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
          && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
          && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
        {
          XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
                                                       new_label);
          --LABEL_NUSES (old_label);
          ++LABEL_NUSES (new_label);
        }
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR)
        return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
        {
          rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
          gcc_assert (GET_CODE (old_ref) == LABEL_REF);
          if (XEXP (old_ref, 0) == old_label)
            {
              ASM_OPERANDS_LABEL (tmp, i)
                = gen_rtx_LABEL_REF (Pmode, new_label);
              --LABEL_NUSES (old_label);
              ++LABEL_NUSES (new_label);
            }
        }

      if (JUMP_LABEL (insn) == old_label)
        {
          JUMP_LABEL (insn) = new_label;
          note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
          if (note)
            remove_note (insn, note);
        }
      else
        {
          note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
          if (note)
            remove_note (insn, note);
          if (JUMP_LABEL (insn) != new_label
              && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
            add_reg_note (insn, REG_LABEL_TARGET, new_label);
        }
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
             != NULL_RTX)
        XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
         one basic block to the other in case only one computed_jump is
         available.  */
      if (computed_jump_p (insn)
          /* A return instruction can't be redirected.  */
          || returnjump_p (insn))
        return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
        {
          /* If the insn doesn't go where we think, we're confused.  */
          gcc_assert (JUMP_LABEL (insn) == old_label);

          /* If the substitution doesn't succeed, die.  This can happen
             if the back end emitted unrecognizable instructions or if
             target is exit block on some arches.  */
          if (!redirect_jump (insn, block_label (new_bb), 0))
            {
              gcc_assert (new_bb == EXIT_BLOCK_PTR);
              return false;
            }
        }
    }
  return true;
}


/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (insn, old_label, target))
        return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
        return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
             e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
   expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equivalent to the
   TARGET.  Then it should try the simplifications and do nothing if none is
   possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL in case E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  return ret;
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
        {
          int prob = INTVAL (XEXP (note, 0));

          b->probability = prob;
          b->count = e->count * prob / REG_BR_PROB_BASE;
          e->probability -= e->probability;
          e->count -= b->count;
          if (e->probability < 0)
            e->probability = 0;
          if (e->count < 0)
            e->count = 0;
        }
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
         edge.
         We can't redirect abnormal edge, but we still can split the fallthru
         one and create separate abnormal edge to original destination.
         This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
        {
          /* We can't redirect the entry block.  Create an empty block
             at the start of the function which we use to add the new
             jump.  */
          edge tmp;
          edge_iterator ei;
          bool found = false;

          basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);

          /* Change the existing edge's source to be the new block, and add
             a new edge from the entry block to the new block.  */
          e->src = bb;
          for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
            {
              if (tmp == e)
                {
                  ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
                  found = true;
                  break;
                }
              else
                ei_next (&ei);
            }

          gcc_assert (found);

          vec_safe_push (bb->succs, e);
          make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
        }
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
        {
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
            {
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
              XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
              adjust_jump_target = true;
            }
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
            asm_goto_edge = true;
        }
      if (adjust_jump_target)
        {
          rtx insn = BB_END (e->src), note;
          rtx old_label = BB_HEAD (e->dest);
          rtx new_label = BB_HEAD (target);

          if (JUMP_LABEL (insn) == old_label)
            {
              JUMP_LABEL (insn) = new_label;
              note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
              if (note)
                remove_note (insn, note);
            }
          else
            {
              note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
              if (note)
                remove_note (insn, note);
              if (JUMP_LABEL (insn) != new_label
                  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
                add_reg_note (insn, REG_LABEL_TARGET, new_label);
            }
          while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
                 != NULL_RTX)
            XEXP (note, 0) = new_label;
        }
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
         by searching forward from there.  Otherwise start searching
         forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
        note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
          && targetm_common.have_named_sections
          && JUMP_P (BB_END (jump_block))
          && !any_condjump_p (BB_END (jump_block))
          && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
        add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If asm goto has any label refs to target's label,
         add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
        {
          new_edge->probability /= 2;
          new_edge->count /= 2;
          jump_block->count /= 2;
          jump_block->frequency /= 2;
          new_edge = make_edge (new_edge->src, target,
                                e->flags & ~EDGE_FALLTHRU);
          new_edge->probability = probability - probability / 2;
          new_edge->count = count - count / 2;
        }

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
      if (jump_label == ret_rtx)
        {
#ifdef HAVE_return
          emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
          gcc_unreachable ();
#endif
        }
      else
        {
          gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
          emit_jump_insn_after_setloc (gen_simple_return (),
                                       BB_END (jump_block), loc);
#else
          gcc_unreachable ();
#endif
        }
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  return new_bb;
}

/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
        q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
f470c378
ZD
1645/* Should move basic block BB after basic block AFTER. NIY. */
1646
1647static bool
1648rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1649 basic_block after ATTRIBUTE_UNUSED)
1650{
1651 return false;
1652}
1653
ca6c03ca 1654/* Split a (typically critical) edge. Return the new block.
41806d92 1655 The edge must not be abnormal.
ca6c03ca
JH
1656
1657 ??? The code generally expects to be called on critical edges.
1658 The case of a block ending in an unconditional jump to a
1659 block with multiple predecessors is not handled optimally. */
1660
8ce33230 1661static basic_block
d329e058 1662rtl_split_edge (edge edge_in)
ca6c03ca
JH
1663{
1664 basic_block bb;
ca6c03ca
JH
1665 rtx before;
1666
1667 /* Abnormal edges cannot be split. */
341c100f 1668 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
ca6c03ca
JH
1669
1670 /* We are going to place the new block in front of edge destination.
eaec9b3d 1671 Avoid existence of fallthru predecessors. */
ca6c03ca
JH
1672 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1673 {
0fd4b31d 1674 edge e = find_fallthru_edge (edge_in->dest->preds);
ca6c03ca
JH
1675
1676 if (e)
1677 force_nonfallthru (e);
1678 }
1679
96e82e0a
ZD
1680 /* Create the basic block note. */
1681 if (edge_in->dest != EXIT_BLOCK_PTR)
a813c111 1682 before = BB_HEAD (edge_in->dest);
ca6c03ca
JH
1683 else
1684 before = NULL_RTX;
1685
623a66fa 1686 /* If this is a fall through edge to the exit block, the blocks might
ffe14686
AM
1687 not be adjacent, and the right place is after the source. */
1688 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
623a66fa
R
1689 {
1690 before = NEXT_INSN (BB_END (edge_in->src));
623a66fa 1691 bb = create_basic_block (before, NULL, edge_in->src);
076c7ab8 1692 BB_COPY_PARTITION (bb, edge_in->src);
623a66fa
R
1693 }
1694 else
9fb32434
CT
1695 {
1696 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
076c7ab8
ZW
1697 /* ??? Why not edge_in->dest->prev_bb here? */
1698 BB_COPY_PARTITION (bb, edge_in->dest);
9fb32434 1699 }
ca6c03ca 1700
4977bab6 1701 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
ca6c03ca 1702
4d6922ee 1703 /* For non-fallthru edges, we must adjust the predecessor's
ca6c03ca
JH
1704 jump instruction to target our new block. */
1705 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1706 {
341c100f
NS
1707 edge redirected = redirect_edge_and_branch (edge_in, bb);
1708 gcc_assert (redirected);
ca6c03ca
JH
1709 }
1710 else
3b5fda81
JJ
1711 {
1712 if (edge_in->src != ENTRY_BLOCK_PTR)
1713 {
1714 /* For asm goto even splitting of fallthru edge might
1715 need insn patching, as other labels might point to the
1716 old label. */
1717 rtx last = BB_END (edge_in->src);
1718 if (last
1719 && JUMP_P (last)
1720 && edge_in->dest != EXIT_BLOCK_PTR
1721 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1722 && patch_jump_insn (last, before, bb))
1723 df_set_bb_dirty (edge_in->src);
1724 }
1725 redirect_edge_succ (edge_in, bb);
1726 }
ca6c03ca
JH
1727
1728 return bb;
1729}
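/* Illustrative sketch (editorial addition, not part of cfgrtl.c): splitting
   a critical, non-abnormal edge through the CFG hook, which dispatches to
   rtl_split_edge above when the CFG is in plain RTL mode.  The name
   example_split_if_critical is hypothetical.  */

static basic_block
example_split_if_critical (edge e)
{
  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
    return split_edge (e);
  return NULL;
}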
1730
1731/* Queue instructions for insertion on an edge between two basic blocks.
1732 The new instructions and basic blocks (if any) will not appear in the
1733 CFG until commit_edge_insertions is called. */
1734
1735void
d329e058 1736insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1737{
1738 /* We cannot insert instructions on an abnormal critical edge.
1739 It will be easier to find the culprit if we die now. */
341c100f 1740 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1741
6de9cd9a 1742 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1743 start_sequence ();
1744 else
6de9cd9a 1745 push_to_sequence (e->insns.r);
ca6c03ca
JH
1746
1747 emit_insn (pattern);
1748
6de9cd9a 1749 e->insns.r = get_insns ();
ca6c03ca
JH
1750 end_sequence ();
1751}
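/* Illustrative sketch (editorial addition, not part of cfgrtl.c): a pass
   that queues a register-to-register copy on every outgoing edge of BB and
   then materializes the queued code.  example_queue_copies, DEST and SRC
   are hypothetical; DEST and SRC are pseudo registers of the same mode
   supplied by the caller.  */

static void
example_queue_copies (basic_block bb, rtx dest, rtx src)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)))
      insert_insn_on_edge (gen_move_insn (dest, src), e);

  /* Splitting of edges and emission of the queued sequences happens here;
     new basic blocks may be created for critical edges.  */
  commit_edge_insertions ();
}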
1752
1753/* Update the CFG for the instructions queued on edge E. */
1754
4e3825db 1755void
2ac66157 1756commit_one_edge_insertion (edge e)
ca6c03ca
JH
1757{
1758 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
898c90c0 1759 basic_block bb;
ca6c03ca
JH
1760
1761 /* Pull the insns off the edge now since the edge might go away. */
6de9cd9a
DN
1762 insns = e->insns.r;
1763 e->insns.r = NULL_RTX;
ca6c03ca 1764
898c90c0
EB
1765 /* Figure out where to put these insns. If the destination has
1766 one predecessor, insert there. Except for the exit block. */
1767 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
ca6c03ca 1768 {
898c90c0
EB
1769 bb = e->dest;
1770
1771 /* Get the location correct wrt a code label, and "nice" wrt
1772 a basic block note, and before everything else. */
1773 tmp = BB_HEAD (bb);
1774 if (LABEL_P (tmp))
1775 tmp = NEXT_INSN (tmp);
1776 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1777 tmp = NEXT_INSN (tmp);
1778 if (tmp == BB_HEAD (bb))
1779 before = tmp;
1780 else if (tmp)
1781 after = PREV_INSN (tmp);
1782 else
1783 after = get_last_insn ();
1784 }
3dec4024 1785
898c90c0
EB
1786 /* If the source has one successor and the edge is not abnormal,
1787 insert there. Except for the entry block. */
1788 else if ((e->flags & EDGE_ABNORMAL) == 0
1789 && single_succ_p (e->src)
1790 && e->src != ENTRY_BLOCK_PTR)
1791 {
1792 bb = e->src;
3dec4024 1793
898c90c0
EB
1794 /* It is possible to have a non-simple jump here. Consider a target
1795 where some forms of unconditional jumps clobber a register. This
1796 happens on the fr30 for example.
ca6c03ca 1797
898c90c0
EB
1798 We know this block has a single successor, so we can just emit
1799 the queued insns before the jump. */
1800 if (JUMP_P (BB_END (bb)))
1801 before = BB_END (bb);
3dec4024
JH
1802 else
1803 {
898c90c0
EB
1804 /* We'd better be fallthru, or we've lost track of what's what. */
1805 gcc_assert (e->flags & EDGE_FALLTHRU);
750054a2 1806
898c90c0 1807 after = BB_END (bb);
ca6c03ca
JH
1808 }
1809 }
1810
898c90c0
EB
1811 /* Otherwise we must split the edge. */
1812 else
1813 {
1814 bb = split_edge (e);
1815 after = BB_END (bb);
ca6c03ca 1816
898c90c0 1817 if (flag_reorder_blocks_and_partition
677f3fa8 1818 && targetm_common.have_named_sections
898c90c0
EB
1819 && e->src != ENTRY_BLOCK_PTR
1820 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1821 && !(e->flags & EDGE_CROSSING)
1822 && JUMP_P (after)
1823 && !any_condjump_p (after)
1824 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1825 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1826 }
1827
1828 /* Now that we've found the spot, do the insertion. */
ca6c03ca
JH
1829 if (before)
1830 {
6fb5fa3c 1831 emit_insn_before_noloc (insns, before, bb);
ca6c03ca
JH
1832 last = prev_nonnote_insn (before);
1833 }
1834 else
6fb5fa3c 1835 last = emit_insn_after_noloc (insns, after, bb);
ca6c03ca
JH
1836
1837 if (returnjump_p (last))
1838 {
1839 /* ??? Remove all outgoing edges from BB and add one for EXIT.
c22cacf3
MS
1840 This is not currently a problem because this only happens
1841 for the (single) epilogue, which already has a fallthru edge
1842 to EXIT. */
ca6c03ca 1843
c5cbcccf 1844 e = single_succ_edge (bb);
341c100f 1845 gcc_assert (e->dest == EXIT_BLOCK_PTR
c5cbcccf 1846 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
ca6c03ca 1847
5f0d2358 1848 e->flags &= ~EDGE_FALLTHRU;
ca6c03ca 1849 emit_barrier_after (last);
0b17ab2f 1850
ca6c03ca
JH
1851 if (before)
1852 delete_insn (before);
1853 }
341c100f
NS
1854 else
1855 gcc_assert (!JUMP_P (last));
ca6c03ca
JH
1856}
1857
1858/* Update the CFG for all queued instructions. */
1859
1860void
d329e058 1861commit_edge_insertions (void)
ca6c03ca 1862{
ca6c03ca
JH
1863 basic_block bb;
1864
1865#ifdef ENABLE_CHECKING
1866 verify_flow_info ();
1867#endif
1868
e0082a72 1869 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
ca6c03ca 1870 {
628f6a4e
BE
1871 edge e;
1872 edge_iterator ei;
ca6c03ca 1873
628f6a4e
BE
1874 FOR_EACH_EDGE (e, ei, bb->succs)
1875 if (e->insns.r)
cf103ca4 1876 commit_one_edge_insertion (e);
3dec4024 1877 }
ca6c03ca
JH
1878}
1879\f
6fb5fa3c 1880
f470c378 1881/* Print out RTL-specific basic block information (live information
a315c44c
SB
1882 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1883 documented in dumpfile.h. */
ca6c03ca 1884
10e9fecc 1885static void
a315c44c 1886rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
ca6c03ca
JH
1887{
1888 rtx insn;
1889 rtx last;
f470c378 1890 char *s_indent;
ca6c03ca 1891
ae50c0cb 1892 s_indent = (char *) alloca ((size_t) indent + 1);
400e39e3 1893 memset (s_indent, ' ', (size_t) indent);
f470c378 1894 s_indent[indent] = '\0';
b8698a0f 1895
a315c44c 1896 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
1897 {
1898 df_dump_top (bb, outf);
1899 putc ('\n', outf);
1900 }
ca6c03ca 1901
68cc3174
EB
1902 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1903 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1904 insn = NEXT_INSN (insn))
a315c44c 1905 {
7b19209f
SB
1906 if (flags & TDF_DETAILS)
1907 df_dump_insn_top (insn, outf);
a315c44c
SB
1908 if (! (flags & TDF_SLIM))
1909 print_rtl_single (outf, insn);
1910 else
1911 dump_insn_slim (outf, insn);
7b19209f
SB
1912 if (flags & TDF_DETAILS)
1913 df_dump_insn_bottom (insn, outf);
a315c44c
SB
1914 }
1915
1916 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
1917 {
1918 df_dump_bottom (bb, outf);
1919 putc ('\n', outf);
1920 }
1921
ca6c03ca
JH
1922}
1923\f
c4669594
SB
1924/* Like dump_function_to_file, but for RTL. Print out dataflow information
1925 for the start of each basic block. FLAGS are the TDF_* masks documented
1926 in dumpfile.h. */
ca6c03ca
JH
1927
1928void
a315c44c 1929print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
ca6c03ca 1930{
22ea9ec0 1931 const_rtx tmp_rtx;
ca6c03ca
JH
1932 if (rtx_first == 0)
1933 fprintf (outf, "(nil)\n");
1934 else
1935 {
ca6c03ca
JH
1936 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1937 int max_uid = get_max_uid ();
5ed6ace5
MD
1938 basic_block *start = XCNEWVEC (basic_block, max_uid);
1939 basic_block *end = XCNEWVEC (basic_block, max_uid);
1940 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
e0082a72
ZD
1941 basic_block bb;
1942
c4669594
SB
1943 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1944 insns, but the CFG is not maintained so the basic block info
1945 is not reliable. Therefore it's omitted from the dumps. */
1946 if (! (cfun->curr_properties & PROP_cfg))
1947 flags &= ~TDF_BLOCKS;
1948
6fb5fa3c
DB
1949 if (df)
1950 df_dump_start (outf);
1951
c4669594 1952 if (flags & TDF_BLOCKS)
ca6c03ca 1953 {
c4669594 1954 FOR_EACH_BB_REVERSE (bb)
ca6c03ca 1955 {
c4669594
SB
1956 rtx x;
1957
1958 start[INSN_UID (BB_HEAD (bb))] = bb;
1959 end[INSN_UID (BB_END (bb))] = bb;
1960 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1961 {
1962 enum bb_state state = IN_MULTIPLE_BB;
5f0d2358 1963
c4669594
SB
1964 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1965 state = IN_ONE_BB;
1966 in_bb_p[INSN_UID (x)] = state;
ca6c03ca 1967
c4669594
SB
1968 if (x == BB_END (bb))
1969 break;
1970 }
ca6c03ca
JH
1971 }
1972 }
1973
1974 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1975 {
c4669594
SB
1976 if (flags & TDF_BLOCKS)
1977 {
1978 bb = start[INSN_UID (tmp_rtx)];
1979 if (bb != NULL)
1980 {
1981 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1982 if (df && (flags & TDF_DETAILS))
1983 df_dump_top (bb, outf);
1984 }
ca6c03ca 1985
c4669594
SB
1986 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1987 && !NOTE_P (tmp_rtx)
1988 && !BARRIER_P (tmp_rtx))
1989 fprintf (outf, ";; Insn is not within a basic block\n");
1990 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1991 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1992 }
ca6c03ca 1993
7b19209f
SB
1994 if (flags & TDF_DETAILS)
1995 df_dump_insn_top (tmp_rtx, outf);
a315c44c
SB
1996 if (! (flags & TDF_SLIM))
1997 print_rtl_single (outf, tmp_rtx);
1998 else
1999 dump_insn_slim (outf, tmp_rtx);
7b19209f
SB
2000 if (flags & TDF_DETAILS)
2001 df_dump_insn_bottom (tmp_rtx, outf);
ca6c03ca 2002
c4669594
SB
2003 if (flags & TDF_BLOCKS)
2004 {
2005 bb = end[INSN_UID (tmp_rtx)];
2006 if (bb != NULL)
2007 {
2008 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2009 if (df && (flags & TDF_DETAILS))
2010 df_dump_bottom (bb, outf);
1b03a96d 2011 putc ('\n', outf);
c4669594
SB
2012 }
2013 }
ca6c03ca
JH
2014 }
2015
2016 free (start);
2017 free (end);
2018 free (in_bb_p);
2019 }
ca6c03ca
JH
2020}
2021\f
532aafad
SB
2022/* Update the branch probability of BB if a REG_BR_PROB is present. */
2023
b446e5a2 2024void
d329e058 2025update_br_prob_note (basic_block bb)
b446e5a2
JH
2026{
2027 rtx note;
4b4bf941 2028 if (!JUMP_P (BB_END (bb)))
b446e5a2 2029 return;
a813c111 2030 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
b446e5a2
JH
2031 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2032 return;
2033 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2034}
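/* Illustrative sketch (editorial addition, not part of cfgrtl.c): keeping
   the REG_BR_PROB note in sync after a pass rescales the outgoing edge
   probabilities of BB.  example_rescale_branch and TAKEN_PROB are
   hypothetical; TAKEN_PROB is expected to lie in [0, REG_BR_PROB_BASE].  */

static void
example_rescale_branch (basic_block bb, int taken_prob)
{
  if (EDGE_COUNT (bb->succs) == 2 && any_condjump_p (BB_END (bb)))
    {
      BRANCH_EDGE (bb)->probability = taken_prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - taken_prob;
      /* Mirror the new branch probability into the note on the jump.  */
      update_br_prob_note (bb);
    }
}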
96370780
MK
2035
2036/* Get the last insn associated with block BB (that includes barriers and
2037 tablejumps after BB). */
2038rtx
2039get_last_bb_insn (basic_block bb)
2040{
2041 rtx tmp;
2042 rtx end = BB_END (bb);
2043
2044 /* Include any jump table following the basic block. */
2045 if (tablejump_p (end, NULL, &tmp))
2046 end = tmp;
2047
2048 /* Include any barriers that may follow the basic block. */
1e211590 2049 tmp = next_nonnote_insn_bb (end);
96370780
MK
2050 while (tmp && BARRIER_P (tmp))
2051 {
2052 end = tmp;
1e211590 2053 tmp = next_nonnote_insn_bb (end);
96370780
MK
2054 }
2055
2056 return end;
2057}
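/* Illustrative sketch (editorial addition, not part of cfgrtl.c): roughly
   how a block-deletion routine is expected to use get_last_bb_insn, so
   that the trailing jump table and barriers disappear together with the
   block.  example_delete_block_insns is hypothetical.  */

static void
example_delete_block_insns (basic_block b)
{
  rtx first = BB_HEAD (b);
  rtx last = get_last_bb_insn (b);

  /* Selectively delete the whole chain, including any barriers and jump
     table that follow BB_END (b).  */
  delete_insn_chain (first, last, true);
}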
b446e5a2 2058\f
10e9fecc
JH
2059/* Verify the CFG and RTL consistency common for both underlying RTL and
2060 cfglayout RTL.
ca6c03ca
JH
2061
2062 Currently it does the following checks:
2063
ca6c03ca 2064 - overlapping of basic blocks
9eab6785 2065 - insns with wrong BLOCK_FOR_INSN pointers
ca6c03ca 2066 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
f63d1bf7 2067 - tails of basic blocks (ensure that boundary is necessary)
ca6c03ca
JH
2068 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2069 and NOTE_INSN_BASIC_BLOCK
750054a2 2070 - verify that no fall_thru edge crosses hot/cold partition boundaries
9eab6785 2071 - verify that there are no pending RTL branch predictions
ca6c03ca
JH
2072
2073 In the future it can be extended to check a lot of other stuff as well
2074 (reachability of basic blocks, life information, etc. etc.). */
f470c378 2075
10e9fecc 2076static int
d329e058 2077rtl_verify_flow_info_1 (void)
ca6c03ca 2078{
ca6c03ca 2079 rtx x;
10e9fecc 2080 int err = 0;
94eb5ddb 2081 basic_block bb;
ca6c03ca 2082
9eab6785 2083 /* Check the general integrity of the basic blocks. */
e0082a72 2084 FOR_EACH_BB_REVERSE (bb)
ca6c03ca 2085 {
9eab6785 2086 rtx insn;
5f0d2358 2087
5e2d947c
JH
2088 if (!(bb->flags & BB_RTL))
2089 {
2090 error ("BB_RTL flag not set for block %d", bb->index);
2091 err = 1;
2092 }
2093
9eab6785 2094 FOR_BB_INSNS (bb, insn)
8ce9fd5d 2095 if (BLOCK_FOR_INSN (insn) != bb)
9eab6785
SB
2096 {
2097 error ("insn %d basic block pointer is %d, should be %d",
2098 INSN_UID (insn),
2099 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2100 bb->index);
2101 err = 1;
2102 }
a7b87f73 2103
bcc708fc 2104 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
a7b87f73
ZD
2105 if (!BARRIER_P (insn)
2106 && BLOCK_FOR_INSN (insn) != NULL)
2107 {
2108 error ("insn %d in header of bb %d has non-NULL basic block",
2109 INSN_UID (insn), bb->index);
2110 err = 1;
2111 }
bcc708fc 2112 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
a7b87f73
ZD
2113 if (!BARRIER_P (insn)
2114 && BLOCK_FOR_INSN (insn) != NULL)
2115 {
2116 error ("insn %d in footer of bb %d has non-NULL basic block",
2117 INSN_UID (insn), bb->index);
2118 err = 1;
2119 }
ca6c03ca
JH
2120 }
2121
2122 /* Now check the basic blocks (boundaries etc.) */
e0082a72 2123 FOR_EACH_BB_REVERSE (bb)
ca6c03ca 2124 {
f99ffaa3
EB
2125 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2126 int n_eh = 0, n_abnormal = 0;
3cf54412 2127 edge e, fallthru = NULL;
5a1a3e5e 2128 rtx note;
628f6a4e 2129 edge_iterator ei;
ca6c03ca 2130
2085a21f 2131 if (JUMP_P (BB_END (bb))
a813c111 2132 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
628f6a4e 2133 && EDGE_COUNT (bb->succs) >= 2
a813c111 2134 && any_condjump_p (BB_END (bb)))
5a1a3e5e 2135 {
e53de54d
JH
2136 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2137 && profile_status != PROFILE_ABSENT)
5a1a3e5e 2138 {
0108ae51 2139 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
5a1a3e5e
JH
2140 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2141 err = 1;
2142 }
2143 }
628f6a4e 2144 FOR_EACH_EDGE (e, ei, bb->succs)
ca6c03ca 2145 {
0be7e7a6
RH
2146 bool is_crossing;
2147
ca6c03ca 2148 if (e->flags & EDGE_FALLTHRU)
0be7e7a6
RH
2149 n_fallthru++, fallthru = e;
2150
2151 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2152 && e->src != ENTRY_BLOCK_PTR
2153 && e->dest != EXIT_BLOCK_PTR);
2154 if (e->flags & EDGE_CROSSING)
750054a2 2155 {
0be7e7a6
RH
2156 if (!is_crossing)
2157 {
2158 error ("EDGE_CROSSING incorrectly set across same section");
2159 err = 1;
2160 }
2161 if (e->flags & EDGE_FALLTHRU)
2162 {
f99ffaa3 2163 error ("fallthru edge crosses section boundary in bb %i",
750054a2
CT
2164 e->src->index);
2165 err = 1;
2166 }
0be7e7a6
RH
2167 if (e->flags & EDGE_EH)
2168 {
f99ffaa3 2169 error ("EH edge crosses section boundary in bb %i",
0be7e7a6
RH
2170 e->src->index);
2171 err = 1;
2172 }
2173 }
2174 else if (is_crossing)
2175 {
2176 error ("EDGE_CROSSING missing across section boundary");
2177 err = 1;
750054a2 2178 }
3dec4024 2179
65f43cdf
ZD
2180 if ((e->flags & ~(EDGE_DFS_BACK
2181 | EDGE_CAN_FALLTHRU
2182 | EDGE_IRREDUCIBLE_LOOP
9beb1c84 2183 | EDGE_LOOP_EXIT
7e872b90
JJ
2184 | EDGE_CROSSING
2185 | EDGE_PRESERVE)) == 0)
3dec4024
JH
2186 n_branch++;
2187
2188 if (e->flags & EDGE_ABNORMAL_CALL)
f99ffaa3
EB
2189 n_abnormal_call++;
2190
2191 if (e->flags & EDGE_SIBCALL)
2192 n_sibcall++;
3dec4024
JH
2193
2194 if (e->flags & EDGE_EH)
2195 n_eh++;
f99ffaa3
EB
2196
2197 if (e->flags & EDGE_ABNORMAL)
3dec4024 2198 n_abnormal++;
ca6c03ca 2199 }
5f0d2358 2200
1d65f45c 2201 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
3dec4024 2202 {
f99ffaa3 2203 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
3dec4024
JH
2204 err = 1;
2205 }
1d65f45c
RH
2206 if (n_eh > 1)
2207 {
f99ffaa3 2208 error ("too many exception handling edges in bb %i", bb->index);
1d65f45c
RH
2209 err = 1;
2210 }
3dec4024 2211 if (n_branch
4b4bf941 2212 && (!JUMP_P (BB_END (bb))
a813c111
SB
2213 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2214 || any_condjump_p (BB_END (bb))))))
3dec4024 2215 {
ab532386 2216 error ("too many outgoing branch edges from bb %i", bb->index);
3dec4024
JH
2217 err = 1;
2218 }
a813c111 2219 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
3dec4024 2220 {
f99ffaa3 2221 error ("fallthru edge after unconditional jump in bb %i", bb->index);
3dec4024
JH
2222 err = 1;
2223 }
a813c111 2224 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
3dec4024 2225 {
f99ffaa3
EB
2226 error ("wrong number of branch edges after unconditional jump"
2227 " in bb %i", bb->index);
3dec4024
JH
2228 err = 1;
2229 }
a813c111 2230 if (n_branch != 1 && any_condjump_p (BB_END (bb))
c11fd0b2 2231 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
3dec4024 2232 {
f99ffaa3
EB
2233 error ("wrong amount of branch edges after conditional jump"
2234 " in bb %i", bb->index);
2235 err = 1;
2236 }
2237 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2238 {
2239 error ("abnormal call edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2240 err = 1;
2241 }
f99ffaa3 2242 if (n_sibcall && !CALL_P (BB_END (bb)))
3dec4024 2243 {
f99ffaa3 2244 error ("sibcall edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2245 err = 1;
2246 }
f99ffaa3
EB
2247 if (n_abnormal > n_eh
2248 && !(CALL_P (BB_END (bb))
2249 && n_abnormal == n_abnormal_call + n_sibcall)
4b4bf941 2250 && (!JUMP_P (BB_END (bb))
a813c111
SB
2251 || any_condjump_p (BB_END (bb))
2252 || any_uncondjump_p (BB_END (bb))))
3dec4024 2253 {
ab532386 2254 error ("abnormal edges for no purpose in bb %i", bb->index);
3dec4024
JH
2255 err = 1;
2256 }
f87c27b4 2257
a813c111 2258 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
0ca541aa 2259 /* We may have a barrier inside a basic block before dead code
9524880c
HPN
2260 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2261 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
5f0d2358
RK
2262 {
2263 debug_rtx (x);
2264 if (! BLOCK_FOR_INSN (x))
2265 error
2266 ("insn %d inside basic block %d but block_for_insn is NULL",
0b17ab2f 2267 INSN_UID (x), bb->index);
5f0d2358
RK
2268 else
2269 error
2270 ("insn %d inside basic block %d but block_for_insn is %i",
0b17ab2f 2271 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
5f0d2358
RK
2272
2273 err = 1;
2274 }
ca6c03ca
JH
2275
2276 /* OK, pointers are correct.  Now check the header of the basic
c22cacf3 2277 block.  It ought to contain an optional CODE_LABEL followed
ca6c03ca 2278 by NOTE_BASIC_BLOCK. */
a813c111 2279 x = BB_HEAD (bb);
4b4bf941 2280 if (LABEL_P (x))
ca6c03ca 2281 {
a813c111 2282 if (BB_END (bb) == x)
ca6c03ca
JH
2283 {
2284 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
0b17ab2f 2285 bb->index);
ca6c03ca
JH
2286 err = 1;
2287 }
5f0d2358 2288
ca6c03ca
JH
2289 x = NEXT_INSN (x);
2290 }
5f0d2358 2291
ca6c03ca
JH
2292 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2293 {
2294 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
0b17ab2f 2295 bb->index);
ca6c03ca
JH
2296 err = 1;
2297 }
2298
a813c111 2299 if (BB_END (bb) == x)
49243025 2300 /* Do checks for empty blocks here. */
5f0d2358 2301 ;
ca6c03ca 2302 else
5f0d2358
RK
2303 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2304 {
2305 if (NOTE_INSN_BASIC_BLOCK_P (x))
2306 {
2307 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
0b17ab2f 2308 INSN_UID (x), bb->index);
5f0d2358
RK
2309 err = 1;
2310 }
2311
a813c111 2312 if (x == BB_END (bb))
5f0d2358 2313 break;
ca6c03ca 2314
83fd323c 2315 if (control_flow_insn_p (x))
5f0d2358 2316 {
0b17ab2f 2317 error ("in basic block %d:", bb->index);
5f0d2358
RK
2318 fatal_insn ("flow control insn inside a basic block", x);
2319 }
2320 }
ca6c03ca
JH
2321 }
2322
10e9fecc 2323 /* Clean up. */
10e9fecc
JH
2324 return err;
2325}
5f0d2358 2326
10e9fecc
JH
2327/* Verify the CFG and RTL consistency common for both underlying RTL and
2328 cfglayout RTL.
5f0d2358 2329
10e9fecc
JH
2330 Currently it does the following checks:
2331 - all checks of rtl_verify_flow_info_1
9eab6785 2332 - test head/end pointers
10e9fecc
JH
2333 - check that all insns are in the basic blocks
2334 (except the switch handling code, barriers and notes)
2335 - check that all returns are followed by barriers
2336 - check that all fallthru edges point to the adjacent blocks. */
9eab6785 2337
10e9fecc 2338static int
d329e058 2339rtl_verify_flow_info (void)
10e9fecc
JH
2340{
2341 basic_block bb;
2342 int err = rtl_verify_flow_info_1 ();
2343 rtx x;
9eab6785
SB
2344 rtx last_head = get_last_insn ();
2345 basic_block *bb_info;
10e9fecc
JH
2346 int num_bb_notes;
2347 const rtx rtx_first = get_insns ();
2348 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
9eab6785
SB
2349 const int max_uid = get_max_uid ();
2350
2351 bb_info = XCNEWVEC (basic_block, max_uid);
ca6c03ca 2352
10e9fecc
JH
2353 FOR_EACH_BB_REVERSE (bb)
2354 {
2355 edge e;
9eab6785
SB
2356 rtx head = BB_HEAD (bb);
2357 rtx end = BB_END (bb);
628f6a4e 2358
9eab6785 2359 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
a7b87f73
ZD
2360 {
2361 /* Verify the end of the basic block is in the INSN chain. */
2362 if (x == end)
2363 break;
2364
2365 /* And that the code outside of basic blocks has NULL bb field. */
2366 if (!BARRIER_P (x)
2367 && BLOCK_FOR_INSN (x) != NULL)
2368 {
2369 error ("insn %d outside of basic blocks has non-NULL bb field",
2370 INSN_UID (x));
2371 err = 1;
2372 }
2373 }
9eab6785
SB
2374
2375 if (!x)
2376 {
2377 error ("end insn %d for block %d not found in the insn stream",
2378 INSN_UID (end), bb->index);
2379 err = 1;
2380 }
2381
2382 /* Work backwards from the end to the head of the basic block
2383 to verify the head is in the RTL chain. */
2384 for (; x != NULL_RTX; x = PREV_INSN (x))
a00d11f0 2385 {
9eab6785
SB
2386 /* While walking over the insn chain, verify insns appear
2387 in only one basic block. */
2388 if (bb_info[INSN_UID (x)] != NULL)
2389 {
2390 error ("insn %d is in multiple basic blocks (%d and %d)",
2391 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2392 err = 1;
2393 }
2394
2395 bb_info[INSN_UID (x)] = bb;
2396
2397 if (x == head)
2398 break;
2399 }
2400 if (!x)
2401 {
2402 error ("head insn %d for block %d not found in the insn stream",
2403 INSN_UID (head), bb->index);
a00d11f0
JH
2404 err = 1;
2405 }
2406
a7b87f73 2407 last_head = PREV_INSN (x);
9eab6785 2408
0fd4b31d 2409 e = find_fallthru_edge (bb->succs);
10e9fecc
JH
2410 if (!e)
2411 {
2412 rtx insn;
2413
2414 /* Ensure existence of barrier in BB with no fallthru edges. */
468059bc
DD
2415 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2416 {
2417 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
10e9fecc
JH
2418 {
2419 error ("missing barrier after block %i", bb->index);
2420 err = 1;
2421 break;
2422 }
468059bc
DD
2423 if (BARRIER_P (insn))
2424 break;
2425 }
10e9fecc
JH
2426 }
2427 else if (e->src != ENTRY_BLOCK_PTR
2428 && e->dest != EXIT_BLOCK_PTR)
c22cacf3 2429 {
10e9fecc
JH
2430 rtx insn;
2431
2432 if (e->src->next_bb != e->dest)
2433 {
2434 error
2435 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2436 e->src->index, e->dest->index);
2437 err = 1;
2438 }
2439 else
a813c111 2440 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
10e9fecc 2441 insn = NEXT_INSN (insn))
6be85b25 2442 if (BARRIER_P (insn) || INSN_P (insn))
10e9fecc
JH
2443 {
2444 error ("verify_flow_info: Incorrect fallthru %i->%i",
2445 e->src->index, e->dest->index);
2446 fatal_insn ("wrong insn in the fallthru edge", insn);
2447 err = 1;
2448 }
c22cacf3 2449 }
10e9fecc 2450 }
ca6c03ca 2451
a7b87f73
ZD
2452 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2453 {
2454 /* Check that the code before the first basic block has NULL
2455 bb field. */
2456 if (!BARRIER_P (x)
2457 && BLOCK_FOR_INSN (x) != NULL)
2458 {
2459 error ("insn %d outside of basic blocks has non-NULL bb field",
2460 INSN_UID (x));
2461 err = 1;
2462 }
2463 }
9eab6785
SB
2464 free (bb_info);
2465
ca6c03ca 2466 num_bb_notes = 0;
e0082a72
ZD
2467 last_bb_seen = ENTRY_BLOCK_PTR;
2468
5f0d2358 2469 for (x = rtx_first; x; x = NEXT_INSN (x))
ca6c03ca
JH
2470 {
2471 if (NOTE_INSN_BASIC_BLOCK_P (x))
2472 {
bf77398c 2473 bb = NOTE_BASIC_BLOCK (x);
5f0d2358 2474
ca6c03ca 2475 num_bb_notes++;
e0082a72 2476 if (bb != last_bb_seen->next_bb)
10e9fecc 2477 internal_error ("basic blocks not laid down consecutively");
ca6c03ca 2478
10e9fecc 2479 curr_bb = last_bb_seen = bb;
ca6c03ca
JH
2480 }
2481
10e9fecc 2482 if (!curr_bb)
ca6c03ca
JH
2483 {
2484 switch (GET_CODE (x))
2485 {
2486 case BARRIER:
2487 case NOTE:
2488 break;
2489
2490 case CODE_LABEL:
666c27b9 2491 /* An addr_vec is placed outside any basic block. */
ca6c03ca 2492 if (NEXT_INSN (x)
481683e1 2493 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
5f0d2358 2494 x = NEXT_INSN (x);
ca6c03ca
JH
2495
2496 /* But in any case, non-deletable labels can appear anywhere. */
2497 break;
2498
2499 default:
1f978f5f 2500 fatal_insn ("insn outside basic block", x);
ca6c03ca
JH
2501 }
2502 }
2503
26cae194 2504 if (JUMP_P (x)
ca6c03ca 2505 && returnjump_p (x) && ! condjump_p (x)
15eb3a2e 2506 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
1f978f5f 2507 fatal_insn ("return not followed by barrier", x);
a813c111 2508 if (curr_bb && x == BB_END (curr_bb))
10e9fecc 2509 curr_bb = NULL;
ca6c03ca
JH
2510 }
2511
24bd1a0b 2512 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
ca6c03ca 2513 internal_error
0b17ab2f
RH
2514 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2515 num_bb_notes, n_basic_blocks);
ca6c03ca 2516
10e9fecc 2517 return err;
ca6c03ca
JH
2518}
2519\f
eaec9b3d 2520/* Assume that the preceding pass has possibly eliminated jump instructions
ca6c03ca
JH
2521 or converted the unconditional jumps.  Eliminate the edges from the CFG.
2522 Return true if any edges are eliminated. */
2523
2524bool
d329e058 2525purge_dead_edges (basic_block bb)
ca6c03ca 2526{
628f6a4e 2527 edge e;
a813c111 2528 rtx insn = BB_END (bb), note;
ca6c03ca 2529 bool purged = false;
628f6a4e
BE
2530 bool found;
2531 edge_iterator ei;
ca6c03ca 2532
b5b8b0ac
AO
2533 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2534 do
2535 insn = PREV_INSN (insn);
2536 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2537
70da1d03 2538 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
4b4bf941 2539 if (NONJUMP_INSN_P (insn)
70da1d03
JH
2540 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2541 {
2542 rtx eqnote;
2543
2544 if (! may_trap_p (PATTERN (insn))
2545 || ((eqnote = find_reg_equal_equiv_note (insn))
2546 && ! may_trap_p (XEXP (eqnote, 0))))
2547 remove_note (insn, note);
2548 }
2549
546c093e 2550 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
628f6a4e 2551 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
546c093e 2552 {
1d65f45c
RH
2553 bool remove = false;
2554
e5f9a909
JW
2555 /* There are three types of edges we need to handle correctly here: EH
2556 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2557 latter can appear when nonlocal gotos are used. */
1d65f45c 2558 if (e->flags & EDGE_ABNORMAL_CALL)
546c093e 2559 {
1d65f45c
RH
2560 if (!CALL_P (insn))
2561 remove = true;
2562 else if (can_nonlocal_goto (insn))
2563 ;
2564 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2565 ;
0a35513e
AH
2566 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2567 ;
1d65f45c
RH
2568 else
2569 remove = true;
546c093e 2570 }
1d65f45c
RH
2571 else if (e->flags & EDGE_EH)
2572 remove = !can_throw_internal (insn);
2573
2574 if (remove)
546c093e 2575 {
1d65f45c
RH
2576 remove_edge (e);
2577 df_set_bb_dirty (bb);
2578 purged = true;
546c093e
RH
2579 }
2580 else
1d65f45c 2581 ei_next (&ei);
546c093e 2582 }
5f0d2358 2583
4b4bf941 2584 if (JUMP_P (insn))
ca6c03ca
JH
2585 {
2586 rtx note;
2587 edge b,f;
628f6a4e 2588 edge_iterator ei;
5f0d2358 2589
ca6c03ca
JH
2590 /* We do care only about conditional jumps and simplejumps. */
2591 if (!any_condjump_p (insn)
2592 && !returnjump_p (insn)
2593 && !simplejump_p (insn))
c51d95ec 2594 return purged;
5f0d2358 2595
5a1a3e5e
JH
2596 /* Branch probability/prediction notes are defined only for
2597 condjumps. We've possibly turned condjump into simplejump. */
2598 if (simplejump_p (insn))
2599 {
2600 note = find_reg_note (insn, REG_BR_PROB, NULL);
2601 if (note)
2602 remove_note (insn, note);
2603 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2604 remove_note (insn, note);
2605 }
2606
628f6a4e 2607 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 2608 {
7fcd7218
JH
2609 /* Avoid letting abnormal flags leak from computed jumps turned
2610 into simplejumps. */
f87c27b4 2611
0e1638d4 2612 e->flags &= ~EDGE_ABNORMAL;
7fcd7218 2613
5a566bed
MM
2614 /* See if this edge is one we should keep. */
2615 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2616 /* A conditional jump can fall through into the next
2617 block, so we should keep the edge. */
628f6a4e
BE
2618 {
2619 ei_next (&ei);
2620 continue;
2621 }
5f0d2358 2622 else if (e->dest != EXIT_BLOCK_PTR
a813c111 2623 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
5a566bed
MM
2624 /* If the destination block is the target of the jump,
2625 keep the edge. */
628f6a4e
BE
2626 {
2627 ei_next (&ei);
2628 continue;
2629 }
5a566bed
MM
2630 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2631 /* If the destination block is the exit block, and this
2632 instruction is a return, then keep the edge. */
628f6a4e
BE
2633 {
2634 ei_next (&ei);
2635 continue;
2636 }
5a566bed
MM
2637 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2638 /* Keep the edges that correspond to exceptions thrown by
0b75beaa
EB
2639 this instruction and rematerialize the EDGE_ABNORMAL
2640 flag we just cleared above. */
2641 {
2642 e->flags |= EDGE_ABNORMAL;
628f6a4e 2643 ei_next (&ei);
0b75beaa
EB
2644 continue;
2645 }
5f0d2358 2646
5a566bed 2647 /* We do not need this edge. */
6fb5fa3c 2648 df_set_bb_dirty (bb);
ca6c03ca
JH
2649 purged = true;
2650 remove_edge (e);
2651 }
5f0d2358 2652
628f6a4e 2653 if (EDGE_COUNT (bb->succs) == 0 || !purged)
c51d95ec 2654 return purged;
5f0d2358 2655
c263766c
RH
2656 if (dump_file)
2657 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5f0d2358 2658
ca6c03ca
JH
2659 if (!optimize)
2660 return purged;
2661
2662 /* Redistribute probabilities. */
c5cbcccf 2663 if (single_succ_p (bb))
ca6c03ca 2664 {
c5cbcccf
ZD
2665 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2666 single_succ_edge (bb)->count = bb->count;
f87c27b4 2667 }
ca6c03ca
JH
2668 else
2669 {
2670 note = find_reg_note (insn, REG_BR_PROB, NULL);
2671 if (!note)
2672 return purged;
5f0d2358 2673
ca6c03ca
JH
2674 b = BRANCH_EDGE (bb);
2675 f = FALLTHRU_EDGE (bb);
2676 b->probability = INTVAL (XEXP (note, 0));
2677 f->probability = REG_BR_PROB_BASE - b->probability;
2678 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2679 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2680 }
5f0d2358 2681
ca6c03ca
JH
2682 return purged;
2683 }
4b4bf941 2684 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
1722c2c8
RH
2685 {
2686 /* First, there should not be any EH or ABCALL edges resulting
2687 from non-local gotos and the like. If there were, we shouldn't
2688 have created the sibcall in the first place. Second, there
2689 should of course never have been a fallthru edge. */
c5cbcccf
ZD
2690 gcc_assert (single_succ_p (bb));
2691 gcc_assert (single_succ_edge (bb)->flags
2692 == (EDGE_SIBCALL | EDGE_ABNORMAL));
1722c2c8
RH
2693
2694 return 0;
2695 }
ca6c03ca 2696
ca6c03ca
JH
2697 /* If we don't see a jump insn, we don't know exactly why the block would
2698 have been broken at this point. Look for a simple, non-fallthru edge,
2699 as these are only created by conditional branches. If we find such an
2700 edge we know that there used to be a jump here and can then safely
2701 remove all non-fallthru edges. */
628f6a4e
BE
2702 found = false;
2703 FOR_EACH_EDGE (e, ei, bb->succs)
2704 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2705 {
2706 found = true;
2707 break;
2708 }
5f0d2358 2709
628f6a4e 2710 if (!found)
ca6c03ca 2711 return purged;
5f0d2358 2712
2afa8dce
DB
2713 /* Remove all but the fake and fallthru edges. The fake edge may be
2714 the only successor for this block in the case of noreturn
2715 calls. */
628f6a4e 2716 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 2717 {
2afa8dce 2718 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
c51d95ec 2719 {
6fb5fa3c 2720 df_set_bb_dirty (bb);
c51d95ec
JH
2721 remove_edge (e);
2722 purged = true;
2723 }
628f6a4e
BE
2724 else
2725 ei_next (&ei);
ca6c03ca 2726 }
5f0d2358 2727
c5cbcccf 2728 gcc_assert (single_succ_p (bb));
5f0d2358 2729
c5cbcccf
ZD
2730 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2731 single_succ_edge (bb)->count = bb->count;
ca6c03ca 2732
c263766c
RH
2733 if (dump_file)
2734 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
0b17ab2f 2735 bb->index);
ca6c03ca
JH
2736 return purged;
2737}
2738
5f0d2358
RK
2739/* Search all basic blocks for potentially dead edges and purge them. Return
2740 true if some edge has been eliminated. */
ca6c03ca
JH
2741
2742bool
25cd19de 2743purge_all_dead_edges (void)
ca6c03ca 2744{
e0082a72 2745 int purged = false;
e0082a72 2746 basic_block bb;
473fb060 2747
e0082a72 2748 FOR_EACH_BB (bb)
473fb060 2749 {
e0082a72 2750 bool purged_here = purge_dead_edges (bb);
5f0d2358 2751
473fb060 2752 purged |= purged_here;
473fb060 2753 }
5f0d2358 2754
ca6c03ca
JH
2755 return purged;
2756}
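/* Illustrative sketch (editorial addition, not part of cfgrtl.c): the
   typical calling pattern.  After a pass has simplified or deleted jumps,
   edges that no longer correspond to any insn must be purged by the
   caller.  example_after_jump_simplification is hypothetical.  */

static void
example_after_jump_simplification (void)
{
  if (purge_all_dead_edges ())
    {
      /* Some edges were removed, so previously reachable blocks may now
	 be dead; a caller would typically follow up with a CFG cleanup
	 here.  */
    }
}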
9ee634e3 2757
ba5e9aca
EB
2758/* This is used by a few passes that emit some instructions after abnormal
2759 calls, moving the basic block's end, while they in fact do want to emit
2760 them on the fallthru edge.  Look for abnormal call edges, search backward
2761 for the call in the block and insert the instructions on the edge instead.
2762
2763 Similarly, handle instructions throwing exceptions internally.
2764
2765 Return true when instructions have been found and inserted on edges. */
2766
2767bool
2768fixup_abnormal_edges (void)
2769{
2770 bool inserted = false;
2771 basic_block bb;
2772
2773 FOR_EACH_BB (bb)
2774 {
2775 edge e;
2776 edge_iterator ei;
2777
2778 /* Look for cases we are interested in - calls or instructions causing
2779 exceptions. */
2780 FOR_EACH_EDGE (e, ei, bb->succs)
2781 if ((e->flags & EDGE_ABNORMAL_CALL)
2782 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2783 == (EDGE_ABNORMAL | EDGE_EH)))
2784 break;
2785
2786 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2787 {
2788 rtx insn;
2789
2790 /* Get past the new insns generated. Allow notes, as the insns
2791 may be already deleted. */
2792 insn = BB_END (bb);
2793 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2794 && !can_throw_internal (insn)
2795 && insn != BB_HEAD (bb))
2796 insn = PREV_INSN (insn);
2797
2798 if (CALL_P (insn) || can_throw_internal (insn))
2799 {
2800 rtx stop, next;
2801
2802 e = find_fallthru_edge (bb->succs);
2803
2804 stop = NEXT_INSN (BB_END (bb));
2805 BB_END (bb) = insn;
2806
2807 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2808 {
2809 next = NEXT_INSN (insn);
2810 if (INSN_P (insn))
2811 {
2812 delete_insn (insn);
2813
2814 /* Sometimes there's still the return value USE.
2815 If it's placed after a trapping call (i.e. that
2816 call is the last insn anyway), we have no fallthru
2817 edge. Simply delete this use and don't try to insert
2818 on the non-existent edge. */
2819 if (GET_CODE (PATTERN (insn)) != USE)
2820 {
2821 /* We're not deleting it, we're moving it. */
2822 INSN_DELETED_P (insn) = 0;
2823 PREV_INSN (insn) = NULL_RTX;
2824 NEXT_INSN (insn) = NULL_RTX;
2825
2826 insert_insn_on_edge (insn, e);
2827 inserted = true;
2828 }
2829 }
2830 else if (!BARRIER_P (insn))
2831 set_block_for_insn (insn, NULL);
2832 }
2833 }
2834
2835 /* It may be that we don't find any trapping insn. In this
2836 case we discovered quite late that the insn that had been
2837 marked as can_throw_internal in fact couldn't trap at all.
2838 So we should in fact delete the EH edges out of the block. */
2839 else
2840 purge_dead_edges (bb);
2841 }
2842 }
2843
2844 return inserted;
2845}
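/* Illustrative sketch (editorial addition, not part of cfgrtl.c): the
   usual way a late pass consumes fixup_abnormal_edges.  The insns it
   moves are only queued on edges, so they still have to be committed.
   example_fixup_after_emitting is hypothetical.  */

static void
example_fixup_after_emitting (void)
{
  if (fixup_abnormal_edges ())
    commit_edge_insertions ();
}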
78bde837
SB
2846\f
2847/* Cut the insns from FIRST to LAST out of the insns stream. */
2848
2849rtx
2850unlink_insn_chain (rtx first, rtx last)
2851{
2852 rtx prevfirst = PREV_INSN (first);
2853 rtx nextlast = NEXT_INSN (last);
2854
2855 PREV_INSN (first) = NULL;
2856 NEXT_INSN (last) = NULL;
2857 if (prevfirst)
2858 NEXT_INSN (prevfirst) = nextlast;
2859 if (nextlast)
2860 PREV_INSN (nextlast) = prevfirst;
2861 else
2862 set_last_insn (prevfirst);
2863 if (!prevfirst)
2864 set_first_insn (nextlast);
2865 return first;
2866}
2867\f
2868/* Skip over inter-block insns occurring after BB which are typically
2869 associated with BB (e.g., barriers). If there are any such insns,
2870 we return the last one. Otherwise, we return the end of BB. */
2871
2872static rtx
2873skip_insns_after_block (basic_block bb)
2874{
2875 rtx insn, last_insn, next_head, prev;
2876
2877 next_head = NULL_RTX;
2878 if (bb->next_bb != EXIT_BLOCK_PTR)
2879 next_head = BB_HEAD (bb->next_bb);
2880
2881 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
2882 {
2883 if (insn == next_head)
2884 break;
2885
2886 switch (GET_CODE (insn))
2887 {
2888 case BARRIER:
2889 last_insn = insn;
2890 continue;
2891
2892 case NOTE:
2893 switch (NOTE_KIND (insn))
2894 {
2895 case NOTE_INSN_BLOCK_END:
2896 gcc_unreachable ();
2897 continue;
2898 default:
2899 continue;
2900 break;
2901 }
2902 break;
2903
2904 case CODE_LABEL:
2905 if (NEXT_INSN (insn)
2906 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
2907 {
2908 insn = NEXT_INSN (insn);
2909 last_insn = insn;
2910 continue;
2911 }
2912 break;
2913
2914 default:
2915 break;
2916 }
2917
2918 break;
2919 }
2920
2921 /* It is possible to hit a contradictory sequence.  For instance:
2922
2923 jump_insn
2924 NOTE_INSN_BLOCK_BEG
2925 barrier
2926
2927 where the barrier belongs to the jump_insn, but the note does not.  This can be
2928 created by removing the basic block originally following
2929 NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes. */
2930
2931 for (insn = last_insn; insn != BB_END (bb); insn = prev)
2932 {
2933 prev = PREV_INSN (insn);
2934 if (NOTE_P (insn))
2935 switch (NOTE_KIND (insn))
2936 {
2937 case NOTE_INSN_BLOCK_END:
2938 gcc_unreachable ();
2939 break;
2940 case NOTE_INSN_DELETED:
2941 case NOTE_INSN_DELETED_LABEL:
2942 case NOTE_INSN_DELETED_DEBUG_LABEL:
2943 continue;
2944 default:
2945 reorder_insns (insn, insn, last_insn);
2946 }
2947 }
2948
2949 return last_insn;
2950}
2951
2952/* Locate or create a label for a given basic block. */
2953
2954static rtx
2955label_for_bb (basic_block bb)
2956{
2957 rtx label = BB_HEAD (bb);
2958
2959 if (!LABEL_P (label))
2960 {
2961 if (dump_file)
2962 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2963
2964 label = block_label (bb);
2965 }
2966
2967 return label;
2968}
2969
2970/* Locate the effective beginning and end of the insn chain for each
2971 block, as defined by skip_insns_after_block above. */
2972
2973static void
2974record_effective_endpoints (void)
2975{
2976 rtx next_insn;
2977 basic_block bb;
2978 rtx insn;
2979
2980 for (insn = get_insns ();
2981 insn
2982 && NOTE_P (insn)
2983 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
2984 insn = NEXT_INSN (insn))
2985 continue;
2986 /* No basic blocks at all? */
2987 gcc_assert (insn);
2988
2989 if (PREV_INSN (insn))
2990 cfg_layout_function_header =
2991 unlink_insn_chain (get_insns (), PREV_INSN (insn));
2992 else
2993 cfg_layout_function_header = NULL_RTX;
2994
2995 next_insn = get_insns ();
2996 FOR_EACH_BB (bb)
2997 {
2998 rtx end;
2999
3000 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3001 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3002 PREV_INSN (BB_HEAD (bb)));
3003 end = skip_insns_after_block (bb);
3004 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3005 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3006 next_insn = NEXT_INSN (BB_END (bb));
3007 }
3008
3009 cfg_layout_function_footer = next_insn;
3010 if (cfg_layout_function_footer)
3011 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3012}
3013\f
3014static unsigned int
3015into_cfg_layout_mode (void)
3016{
3017 cfg_layout_initialize (0);
3018 return 0;
3019}
3020
3021static unsigned int
3022outof_cfg_layout_mode (void)
3023{
3024 basic_block bb;
3025
3026 FOR_EACH_BB (bb)
3027 if (bb->next_bb != EXIT_BLOCK_PTR)
3028 bb->aux = bb->next_bb;
3029
3030 cfg_layout_finalize ();
3031
3032 return 0;
3033}
3034
3035struct rtl_opt_pass pass_into_cfg_layout_mode =
3036{
3037 {
3038 RTL_PASS,
3039 "into_cfglayout", /* name */
2b4e6bf1 3040 OPTGROUP_NONE, /* optinfo_flags */
78bde837
SB
3041 NULL, /* gate */
3042 into_cfg_layout_mode, /* execute */
3043 NULL, /* sub */
3044 NULL, /* next */
3045 0, /* static_pass_number */
3046 TV_CFG, /* tv_id */
3047 0, /* properties_required */
3048 PROP_cfglayout, /* properties_provided */
3049 0, /* properties_destroyed */
3050 0, /* todo_flags_start */
3051 0 /* todo_flags_finish */
3052 }
3053};
3054
3055struct rtl_opt_pass pass_outof_cfg_layout_mode =
3056{
3057 {
3058 RTL_PASS,
3059 "outof_cfglayout", /* name */
2b4e6bf1 3060 OPTGROUP_NONE, /* optinfo_flags */
78bde837
SB
3061 NULL, /* gate */
3062 outof_cfg_layout_mode, /* execute */
3063 NULL, /* sub */
3064 NULL, /* next */
3065 0, /* static_pass_number */
3066 TV_CFG, /* tv_id */
3067 0, /* properties_required */
3068 0, /* properties_provided */
3069 PROP_cfglayout, /* properties_destroyed */
3070 0, /* todo_flags_start */
3071 0 /* todo_flags_finish */
3072 }
3073};
3074\f
3075
3076/* Link the basic blocks in the correct order, compacting the basic
3077 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3078 function also clears the basic block header and footer fields.
3079
3080 This function is usually called after a pass (e.g. tracer) finishes
3081 some transformations while in cfglayout mode. The required sequence
3082 of the basic blocks is in a linked list along the bb->aux field.
3083 This function re-links the basic block prev_bb and next_bb pointers
532aafad
SB
3084 accordingly, and it compacts and renumbers the blocks.
3085
3086 FIXME: This currently works only for RTL, but the only RTL-specific
3087 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3088 to GIMPLE a long time ago, but it doesn't relink the basic block
3089 chain. It could do that (to give better initial RTL) if this function
3090 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
78bde837
SB
3091
3092void
3093relink_block_chain (bool stay_in_cfglayout_mode)
3094{
3095 basic_block bb, prev_bb;
3096 int index;
3097
3098 /* Maybe dump the re-ordered sequence. */
3099 if (dump_file)
3100 {
3101 fprintf (dump_file, "Reordered sequence:\n");
3102 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
3103 bb;
3104 bb = (basic_block) bb->aux, index++)
3105 {
3106 fprintf (dump_file, " %i ", index);
3107 if (get_bb_original (bb))
3108 fprintf (dump_file, "duplicate of %i ",
3109 get_bb_original (bb)->index);
3110 else if (forwarder_block_p (bb)
3111 && !LABEL_P (BB_HEAD (bb)))
3112 fprintf (dump_file, "compensation ");
3113 else
3114 fprintf (dump_file, "bb %i ", bb->index);
3115 fprintf (dump_file, " [%i]\n", bb->frequency);
3116 }
3117 }
3118
3119 /* Now reorder the blocks. */
3120 prev_bb = ENTRY_BLOCK_PTR;
3121 bb = ENTRY_BLOCK_PTR->next_bb;
3122 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3123 {
3124 bb->prev_bb = prev_bb;
3125 prev_bb->next_bb = bb;
3126 }
3127 prev_bb->next_bb = EXIT_BLOCK_PTR;
3128 EXIT_BLOCK_PTR->prev_bb = prev_bb;
3129
3130 /* Then, clean up the aux fields. */
3131 FOR_ALL_BB (bb)
3132 {
3133 bb->aux = NULL;
3134 if (!stay_in_cfglayout_mode)
3135 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3136 }
3137
3138 /* Maybe reset the original copy tables, they are not valid anymore
3139 when we renumber the basic blocks in compact_blocks.  If we are
3140 going out of cfglayout mode, don't re-allocate the tables. */
3141 free_original_copy_tables ();
3142 if (stay_in_cfglayout_mode)
3143 initialize_original_copy_tables ();
3144
3145 /* Finally, put basic_block_info in the new order. */
3146 compact_blocks ();
3147}
3148\f
3149
3150/* Given a reorder chain, rearrange the code to match. */
3151
3152static void
3153fixup_reorder_chain (void)
3154{
3155 basic_block bb;
3156 rtx insn = NULL;
3157
3158 if (cfg_layout_function_header)
3159 {
3160 set_first_insn (cfg_layout_function_header);
3161 insn = cfg_layout_function_header;
3162 while (NEXT_INSN (insn))
3163 insn = NEXT_INSN (insn);
3164 }
3165
3166 /* First do the bulk reordering -- rechain the blocks without regard to
3167 the needed changes to jumps and labels. */
3168
3169 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
3170 {
3171 if (BB_HEADER (bb))
3172 {
3173 if (insn)
3174 NEXT_INSN (insn) = BB_HEADER (bb);
3175 else
3176 set_first_insn (BB_HEADER (bb));
3177 PREV_INSN (BB_HEADER (bb)) = insn;
3178 insn = BB_HEADER (bb);
3179 while (NEXT_INSN (insn))
3180 insn = NEXT_INSN (insn);
3181 }
3182 if (insn)
3183 NEXT_INSN (insn) = BB_HEAD (bb);
3184 else
3185 set_first_insn (BB_HEAD (bb));
3186 PREV_INSN (BB_HEAD (bb)) = insn;
3187 insn = BB_END (bb);
3188 if (BB_FOOTER (bb))
3189 {
3190 NEXT_INSN (insn) = BB_FOOTER (bb);
3191 PREV_INSN (BB_FOOTER (bb)) = insn;
3192 while (NEXT_INSN (insn))
3193 insn = NEXT_INSN (insn);
3194 }
3195 }
3196
3197 NEXT_INSN (insn) = cfg_layout_function_footer;
3198 if (cfg_layout_function_footer)
3199 PREV_INSN (cfg_layout_function_footer) = insn;
3200
3201 while (NEXT_INSN (insn))
3202 insn = NEXT_INSN (insn);
3203
3204 set_last_insn (insn);
3205#ifdef ENABLE_CHECKING
3206 verify_insn_chain ();
3207#endif
3208
3209 /* Now add jumps and labels as needed to match the blocks new
3210 outgoing edges. */
3211
3212 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
3213 {
3214 edge e_fall, e_taken, e;
3215 rtx bb_end_insn;
3216 rtx ret_label = NULL_RTX;
3217 basic_block nb, src_bb;
3218 edge_iterator ei;
3219
3220 if (EDGE_COUNT (bb->succs) == 0)
3221 continue;
3222
3223 /* Find the old fallthru edge, and another non-EH edge for
3224 a taken jump. */
3225 e_taken = e_fall = NULL;
3226
3227 FOR_EACH_EDGE (e, ei, bb->succs)
3228 if (e->flags & EDGE_FALLTHRU)
3229 e_fall = e;
3230 else if (! (e->flags & EDGE_EH))
3231 e_taken = e;
3232
3233 bb_end_insn = BB_END (bb);
3234 if (JUMP_P (bb_end_insn))
3235 {
3236 ret_label = JUMP_LABEL (bb_end_insn);
3237 if (any_condjump_p (bb_end_insn))
3238 {
3239 /* This might happen if the conditional jump has side
3240 effects and could therefore not be optimized away.
3241 Make the basic block end with a barrier in order
3242 to prevent rtl_verify_flow_info from complaining. */
3243 if (!e_fall)
3244 {
3245 gcc_assert (!onlyjump_p (bb_end_insn)
3246 || returnjump_p (bb_end_insn));
3247 BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
3248 continue;
3249 }
3250
3251 /* If the old fallthru is still next, nothing to do. */
3252 if (bb->aux == e_fall->dest
3253 || e_fall->dest == EXIT_BLOCK_PTR)
3254 continue;
3255
3256 /* The degenerate case of a conditional jump jumping to the next
3257 instruction can happen for jumps with side effects. We need
3258 to construct a forwarder block and this will be done just
3259 fine by force_nonfallthru below. */
3260 if (!e_taken)
3261 ;
3262
3263 /* There is another special case: if *neither* block is next,
3264 such as happens at the very end of a function, then we'll
3265 need to add a new unconditional jump. Choose the taken
3266 edge based on known or assumed probability. */
3267 else if (bb->aux != e_taken->dest)
3268 {
3269 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3270
3271 if (note
3272 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
3273 && invert_jump (bb_end_insn,
3274 (e_fall->dest == EXIT_BLOCK_PTR
3275 ? NULL_RTX
3276 : label_for_bb (e_fall->dest)), 0))
3277 {
3278 e_fall->flags &= ~EDGE_FALLTHRU;
3279 gcc_checking_assert (could_fall_through
3280 (e_taken->src, e_taken->dest));
3281 e_taken->flags |= EDGE_FALLTHRU;
3282 update_br_prob_note (bb);
3283 e = e_fall, e_fall = e_taken, e_taken = e;
3284 }
3285 }
3286
3287 /* If the "jumping" edge is a crossing edge, and the fall
3288 through edge is non-crossing, leave things as they are. */
3289 else if ((e_taken->flags & EDGE_CROSSING)
3290 && !(e_fall->flags & EDGE_CROSSING))
3291 continue;
3292
3293 /* Otherwise we can try to invert the jump. This will
3294 basically never fail, however, keep up the pretense. */
3295 else if (invert_jump (bb_end_insn,
3296 (e_fall->dest == EXIT_BLOCK_PTR
3297 ? NULL_RTX
3298 : label_for_bb (e_fall->dest)), 0))
3299 {
3300 e_fall->flags &= ~EDGE_FALLTHRU;
3301 gcc_checking_assert (could_fall_through
3302 (e_taken->src, e_taken->dest));
3303 e_taken->flags |= EDGE_FALLTHRU;
3304 update_br_prob_note (bb);
3305 if (LABEL_NUSES (ret_label) == 0
3306 && single_pred_p (e_taken->dest))
3307 delete_insn (ret_label);
3308 continue;
3309 }
3310 }
3311 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3312 {
3313 /* If the old fallthru is still next or if
3314 asm goto doesn't have a fallthru (e.g. when followed by
3315 __builtin_unreachable ()), nothing to do. */
3316 if (! e_fall
3317 || bb->aux == e_fall->dest
3318 || e_fall->dest == EXIT_BLOCK_PTR)
3319 continue;
3320
3321 /* Otherwise we'll have to use the fallthru fixup below. */
3322 }
3323 else
3324 {
3325 /* Otherwise we have some return, switch or computed
3326 jump. In the 99% case, there should not have been a
3327 fallthru edge. */
3328 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3329 continue;
3330 }
3331 }
3332 else
3333 {
3334 /* No fallthru implies a noreturn function with EH edges, or
3335 something similarly bizarre. In any case, we don't need to
3336 do anything. */
3337 if (! e_fall)
3338 continue;
3339
3340 /* If the fallthru block is still next, nothing to do. */
3341 if (bb->aux == e_fall->dest)
3342 continue;
3343
3344 /* A fallthru to exit block. */
3345 if (e_fall->dest == EXIT_BLOCK_PTR)
3346 continue;
3347 }
3348
3349 /* We got here if we need to add a new jump insn.
3350 Note force_nonfallthru can delete E_FALL and thus we have to
3351 save E_FALL->src prior to the call to force_nonfallthru. */
3352 src_bb = e_fall->src;
3353 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3354 if (nb)
3355 {
3356 nb->aux = bb->aux;
3357 bb->aux = nb;
3358 /* Don't process this new block. */
3359 bb = nb;
3360
3361 /* Make sure new bb is tagged for correct section (same as
3362 fall-thru source, since you cannot fall-thru across
3363 section boundaries). */
3364 BB_COPY_PARTITION (src_bb, single_pred (bb));
3365 if (flag_reorder_blocks_and_partition
3366 && targetm_common.have_named_sections
3367 && JUMP_P (BB_END (bb))
3368 && !any_condjump_p (BB_END (bb))
3369 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
3370 add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
3371 }
3372 }
3373
3374 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3375
3376 /* Annoying special case - jump around dead jumptables left in the code. */
3377 FOR_EACH_BB (bb)
3378 {
3379 edge e = find_fallthru_edge (bb->succs);
3380
3381 if (e && !can_fallthru (e->src, e->dest))
3382 force_nonfallthru (e);
3383 }
3384
3385 /* Ensure goto_locus from edges has some instructions with that locus
3386 in RTL. */
3387 if (!optimize)
3388 FOR_EACH_BB (bb)
3389 {
3390 edge e;
3391 edge_iterator ei;
3392
3393 FOR_EACH_EDGE (e, ei, bb->succs)
2f13f2de 3394 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
5368224f 3395 && !(e->flags & EDGE_ABNORMAL))
78bde837
SB
3396 {
3397 edge e2;
3398 edge_iterator ei2;
3399 basic_block dest, nb;
3400 rtx end;
3401
3402 insn = BB_END (e->src);
3403 end = PREV_INSN (BB_HEAD (e->src));
3404 while (insn != end
5368224f 3405 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
78bde837
SB
3406 insn = PREV_INSN (insn);
3407 if (insn != end
5368224f 3408 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3409 continue;
3410 if (simplejump_p (BB_END (e->src))
5368224f 3411 && !INSN_HAS_LOCATION (BB_END (e->src)))
78bde837 3412 {
5368224f 3413 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
78bde837
SB
3414 continue;
3415 }
3416 dest = e->dest;
3417 if (dest == EXIT_BLOCK_PTR)
3418 {
3419 /* Non-fallthru edges to the exit block cannot be split. */
3420 if (!(e->flags & EDGE_FALLTHRU))
3421 continue;
3422 }
3423 else
3424 {
3425 insn = BB_HEAD (dest);
3426 end = NEXT_INSN (BB_END (dest));
3427 while (insn != end && !NONDEBUG_INSN_P (insn))
3428 insn = NEXT_INSN (insn);
5368224f
DC
3429 if (insn != end && INSN_HAS_LOCATION (insn)
3430 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3431 continue;
3432 }
3433 nb = split_edge (e);
3434 if (!INSN_P (BB_END (nb)))
3435 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3436 nb);
5368224f 3437 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
78bde837
SB
3438
3439 /* If there are other incoming edges to the destination block
3440 with the same goto locus, redirect them to the new block as
3441 well, this can prevent other such blocks from being created
3442 in subsequent iterations of the loop. */
3443 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
2f13f2de 3444 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
78bde837 3445 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
5368224f 3446 && e->goto_locus == e2->goto_locus)
78bde837
SB
3447 redirect_edge_and_branch (e2, nb);
3448 else
3449 ei_next (&ei2);
3450 }
3451 }
3452}
3453\f
3454/* Perform sanity checks on the insn chain.
3455 1. Check that next/prev pointers are consistent in both the forward and
3456 reverse direction.
3457 2. Count insns in chain, going both directions, and check if equal.
3458 3. Check that get_last_insn () returns the actual end of chain. */
3459
3460DEBUG_FUNCTION void
3461verify_insn_chain (void)
3462{
3463 rtx x, prevx, nextx;
3464 int insn_cnt1, insn_cnt2;
3465
3466 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3467 x != 0;
3468 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3469 gcc_assert (PREV_INSN (x) == prevx);
3470
3471 gcc_assert (prevx == get_last_insn ());
3472
3473 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3474 x != 0;
3475 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3476 gcc_assert (NEXT_INSN (x) == nextx);
3477
3478 gcc_assert (insn_cnt1 == insn_cnt2);
3479}
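/* Illustrative sketch (editorial addition, not part of cfgrtl.c): how a
   pass that rewires PREV_INSN/NEXT_INSN pointers by hand might check its
   work, mirroring the use in fixup_reorder_chain above.
   example_check_chain is hypothetical.  */

static void
example_check_chain (void)
{
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}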
3480\f
3481/* If we have assembler epilogues, the block falling through to exit must
3482 be the last one in the reordered chain when we reach final. Ensure
3483 that this condition is met. */
3484static void
3485fixup_fallthru_exit_predecessor (void)
3486{
3487 edge e;
3488 basic_block bb = NULL;
3489
3490 /* This transformation is not valid before reload, because we might
3491 separate a call from the instruction that copies the return
3492 value. */
3493 gcc_assert (reload_completed);
3494
3495 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
3496 if (e)
3497 bb = e->src;
3498
3499 if (bb && bb->aux)
3500 {
3501 basic_block c = ENTRY_BLOCK_PTR->next_bb;
3502
3503 /* If the very first block is the one with the fall-through exit
3504 edge, we have to split that block. */
3505 if (c == bb)
3506 {
3507 bb = split_block (bb, NULL)->dest;
3508 bb->aux = c->aux;
3509 c->aux = bb;
3510 BB_FOOTER (bb) = BB_FOOTER (c);
3511 BB_FOOTER (c) = NULL;
3512 }
3513
3514 while (c->aux != bb)
3515 c = (basic_block) c->aux;
3516
3517 c->aux = bb->aux;
3518 while (c->aux)
3519 c = (basic_block) c->aux;
3520
3521 c->aux = bb;
3522 bb->aux = NULL;
3523 }
3524}
3525
3526/* If the exit block has more than one fallthru predecessor, force it to
3527 have only one. */
3528
3529static void
3530force_one_exit_fallthru (void)
3531{
3532 edge e, predecessor = NULL;
3533 bool more = false;
3534 edge_iterator ei;
3535 basic_block forwarder, bb;
3536
3537 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3538 if (e->flags & EDGE_FALLTHRU)
3539 {
3540 if (predecessor == NULL)
3541 predecessor = e;
3542 else
3543 {
3544 more = true;
3545 break;
3546 }
3547 }
3548
3549 if (!more)
3550 return;
3551
3552 /* Exit has several fallthru predecessors. Create a forwarder block for
3553 them. */
3554 forwarder = split_edge (predecessor);
3555 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
3556 {
3557 if (e->src == forwarder
3558 || !(e->flags & EDGE_FALLTHRU))
3559 ei_next (&ei);
3560 else
3561 redirect_edge_and_branch_force (e, forwarder);
3562 }
3563
3564 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
3565 exit block. */
3566 FOR_EACH_BB (bb)
3567 {
3568 if (bb->aux == NULL && bb != forwarder)
3569 {
3570 bb->aux = forwarder;
3571 break;
3572 }
3573 }
3574}
3575\f
3576/* Return true in case it is possible to duplicate the basic block BB. */
3577
3578static bool
3579cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3580{
3581 /* Do not attempt to duplicate tablejumps, as we need to unshare
3582 the dispatch table. This is difficult to do, as the instructions
3583 computing the jump destination may be hoisted outside the basic block. */
3584 if (tablejump_p (BB_END (bb), NULL, NULL))
3585 return false;
3586
3587 /* Do not duplicate blocks containing insns that can't be copied. */
3588 if (targetm.cannot_copy_insn_p)
3589 {
3590 rtx insn = BB_HEAD (bb);
3591 while (1)
3592 {
3593 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3594 return false;
3595 if (insn == BB_END (bb))
3596 break;
3597 insn = NEXT_INSN (insn);
3598 }
3599 }
3600
3601 return true;
3602}
3603
3604rtx
3605duplicate_insn_chain (rtx from, rtx to)
3606{
3607 rtx insn, last, copy;
3608
3609 /* Avoid updating the boundaries of the previous basic block. The
3610 note will get removed from the insn stream during fixup. */
3611 last = emit_note (NOTE_INSN_DELETED);
3612
3613 /* Create copy at the end of INSN chain. The chain will
3614 be reordered later. */
3615 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
3616 {
3617 switch (GET_CODE (insn))
3618 {
3619 case DEBUG_INSN:
3620 /* Don't duplicate label debug insns. */
3621 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
3622 break;
3623 /* FALLTHRU */
3624 case INSN:
3625 case CALL_INSN:
3626 case JUMP_INSN:
3627 /* Avoid copying of dispatch tables. We never duplicate
3628 tablejumps, so this can only be hit if the table got
3629 moved far away from the original jump. */
34f0d87a 3630 if (JUMP_TABLE_DATA_P (insn))
3631 {
3632 /* Avoid copying following barrier as well if any
3633 (and debug insns in between). */
3634 rtx next;
3635
3636 for (next = NEXT_INSN (insn);
3637 next != NEXT_INSN (to);
3638 next = NEXT_INSN (next))
3639 if (!DEBUG_INSN_P (next))
3640 break;
3641 if (next != NEXT_INSN (to) && BARRIER_P (next))
3642 insn = next;
3643 break;
3644 }
3645 copy = emit_copy_of_insn_after (insn, get_last_insn ());
3646 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
3647 && ANY_RETURN_P (JUMP_LABEL (insn)))
3648 JUMP_LABEL (copy) = JUMP_LABEL (insn);
3649 maybe_copy_prologue_epilogue_insn (insn, copy);
3650 break;
3651
3652 case CODE_LABEL:
3653 break;
3654
3655 case BARRIER:
3656 emit_barrier ();
3657 break;
3658
3659 case NOTE:
3660 switch (NOTE_KIND (insn))
3661 {
3662 /* In case the prologue is empty and the function contains a label
3663 in its first BB, we may want to copy the block. */
3664 case NOTE_INSN_PROLOGUE_END:
3665
3666 case NOTE_INSN_DELETED:
3667 case NOTE_INSN_DELETED_LABEL:
3668 case NOTE_INSN_DELETED_DEBUG_LABEL:
3669 /* No problem to strip these. */
3670 case NOTE_INSN_FUNCTION_BEG:
3671 /* There is always just a single entry to the function. */
3672 case NOTE_INSN_BASIC_BLOCK:
3673 break;
3674
3675 case NOTE_INSN_EPILOGUE_BEG:
3676 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
3677 emit_note_copy (insn);
3678 break;
3679
3680 default:
3681 /* All other notes should have already been eliminated. */
3682 gcc_unreachable ();
3683 }
3684 break;
3685 default:
3686 gcc_unreachable ();
3687 }
3688 }
3689 insn = NEXT_INSN (last);
3690 delete_insn (last);
3691 return insn;
3692}
3693
3694/* Create a duplicate of the basic block BB. */
3695
3696static basic_block
3697cfg_layout_duplicate_bb (basic_block bb)
3698{
3699 rtx insn;
3700 basic_block new_bb;
3701
3702 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
3703 new_bb = create_basic_block (insn,
3704 insn ? get_last_insn () : NULL,
3705 EXIT_BLOCK_PTR->prev_bb);
3706
3707 BB_COPY_PARTITION (new_bb, bb);
3708 if (BB_HEADER (bb))
3709 {
3710 insn = BB_HEADER (bb);
3711 while (NEXT_INSN (insn))
3712 insn = NEXT_INSN (insn);
3713 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
3714 if (insn)
3715 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3716 }
3717
3718 if (BB_FOOTER (bb))
3719 {
3720 insn = BB_FOOTER (bb);
3721 while (NEXT_INSN (insn))
3722 insn = NEXT_INSN (insn);
3723 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
3724 if (insn)
3725 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3726 }
3727
3728 return new_bb;
3729}
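/* Illustrative sketch only (hypothetical helper, compiled out): duplication
   is expected to be guarded by cfg_layout_can_duplicate_bb_p, and a caller
   that is not going to place the copy immediately clears its aux field, as
   rtl_duplicate_bb does later in this file.  */
#if 0
static basic_block
example_duplicate_block (basic_block bb)
{
  basic_block copy = NULL;

  if (cfg_layout_can_duplicate_bb_p (bb))
    {
      copy = cfg_layout_duplicate_bb (bb);
      /* The copy has no position in the layout chain yet.  */
      copy->aux = NULL;
      /* A real caller would now redirect the desired incoming edges
	 to COPY.  */
    }
  return copy;
}
#endif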
3730
3731\f
3732/* Main entry point to this module: initialize the data structures for
3733 CFG layout changes. It keeps LOOPS up-to-date if not null.
3734
3735 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3736
3737void
3738cfg_layout_initialize (unsigned int flags)
3739{
3740 rtx x;
3741 basic_block bb;
3742
3743 initialize_original_copy_tables ();
3744
3745 cfg_layout_rtl_register_cfg_hooks ();
3746
3747 record_effective_endpoints ();
3748
3749 /* Make sure that the targets of non local gotos are marked. */
3750 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3751 {
3752 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3753 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3754 }
3755
3756 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3757}
3758
3759/* Splits superblocks. */
3760void
3761break_superblocks (void)
3762{
3763 sbitmap superblocks;
3764 bool need = false;
3765 basic_block bb;
3766
3767 superblocks = sbitmap_alloc (last_basic_block);
f61e445a 3768 bitmap_clear (superblocks);
3769
3770 FOR_EACH_BB (bb)
3771 if (bb->flags & BB_SUPERBLOCK)
3772 {
3773 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 3774 bitmap_set_bit (superblocks, bb->index);
3775 need = true;
3776 }
3777
3778 if (need)
3779 {
3780 rebuild_jump_labels (get_insns ());
3781 find_many_sub_basic_blocks (superblocks);
3782 }
3783
3784 free (superblocks);
3785}
3786
3787/* Finalize the changes: reorder the insn list according to the sequence specified
3788 by the aux pointers, insert compensation code, and rebuild the scope forest. */
3789
3790void
3791cfg_layout_finalize (void)
3792{
3793#ifdef ENABLE_CHECKING
3794 verify_flow_info ();
3795#endif
3796 force_one_exit_fallthru ();
3797 rtl_register_cfg_hooks ();
3798 if (reload_completed
3799#ifdef HAVE_epilogue
3800 && !HAVE_epilogue
3801#endif
3802 )
3803 fixup_fallthru_exit_predecessor ();
3804 fixup_reorder_chain ();
3805
3806 rebuild_jump_labels (get_insns ());
3807 delete_dead_jumptables ();
3808
3809#ifdef ENABLE_CHECKING
3810 verify_insn_chain ();
3811 verify_flow_info ();
3812#endif
3813}
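/* A minimal sketch of how a pass brackets its CFG surgery between the two
   entry points above (hypothetical function, compiled out; the body is a
   placeholder).  A pass typically records the block order it wants in the
   bb->aux chain, which cfg_layout_finalize honors via fixup_reorder_chain.  */
#if 0
static void
example_cfglayout_session (void)
{
  basic_block bb;

  cfg_layout_initialize (0);

  FOR_EACH_BB (bb)
    {
      /* Reorder, duplicate or redirect blocks here, recording the
	 desired order in bb->aux.  */
    }

  cfg_layout_finalize ();
}
#endif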
3814
ba5e9aca 3815
9ee634e3 3816/* Same as split_block but update cfg_layout structures. */
3817
3818static basic_block
d329e058 3819cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 3820{
ae50c0cb 3821 rtx insn = (rtx) insnp;
f470c378 3822 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 3823
3824 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3825 BB_FOOTER (bb) = NULL;
9ee634e3 3826
f470c378 3827 return new_bb;
3828}
3829
9ee634e3 3830/* Redirect edge E to DEST. */
6de9cd9a 3831static edge
d329e058 3832cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
3833{
3834 basic_block src = e->src;
6de9cd9a 3835 edge ret;
9ee634e3 3836
bc35512f 3837 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 3838 return NULL;
bc35512f 3839
3348b696 3840 if (e->dest == dest)
6de9cd9a 3841 return e;
bc35512f 3842
3348b696 3843 if (e->src != ENTRY_BLOCK_PTR
6de9cd9a 3844 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
f345f21a 3845 {
6fb5fa3c 3846 df_set_bb_dirty (src);
6de9cd9a 3847 return ret;
f345f21a 3848 }
3849
3850 if (e->src == ENTRY_BLOCK_PTR
3851 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
3852 {
3853 if (dump_file)
3854 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
3855 e->src->index, dest->index);
3856
6fb5fa3c 3857 df_set_bb_dirty (e->src);
bc35512f 3858 redirect_edge_succ (e, dest);
6de9cd9a 3859 return e;
3860 }
3861
3862 /* Redirect_edge_and_branch may decide to turn the branch into a fallthru edge
3863 when the basic blocks appear to be in sequence. Avoid this
3864 transformation. */
3865
3866 if (e->flags & EDGE_FALLTHRU)
3867 {
3868 /* Redirect any branch edges unified with the fallthru one. */
4b4bf941 3869 if (JUMP_P (BB_END (src))
3870 && label_is_jump_target_p (BB_HEAD (e->dest),
3871 BB_END (src)))
9ee634e3 3872 {
341c100f 3873 edge redirected;
c22cacf3 3874
3875 if (dump_file)
3876 fprintf (dump_file, "Fallthru edge unified with branch "
3877 "%i->%i redirected to %i\n",
3878 e->src->index, e->dest->index, dest->index);
3879 e->flags &= ~EDGE_FALLTHRU;
3880 redirected = redirect_branch_edge (e, dest);
3881 gcc_assert (redirected);
3882 redirected->flags |= EDGE_FALLTHRU;
3883 df_set_bb_dirty (redirected->src);
3884 return redirected;
3885 }
3886 /* In case we are redirecting the fallthru edge to the branch edge
c22cacf3 3887 of a conditional jump, remove the jump. */
628f6a4e 3888 if (EDGE_COUNT (src->succs) == 2)
9ee634e3 3889 {
3890 /* Find the edge that is different from E. */
3891 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
628f6a4e 3892
9ee634e3 3893 if (s->dest == dest
3894 && any_condjump_p (BB_END (src))
3895 && onlyjump_p (BB_END (src)))
3896 delete_insn (BB_END (src));
9ee634e3 3897 }
c263766c 3898 if (dump_file)
dc764d10 3899 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
bc35512f 3900 e->src->index, e->dest->index, dest->index);
0c617be4 3901 ret = redirect_edge_succ_nodup (e, dest);
3902 }
3903 else
bc35512f 3904 ret = redirect_branch_edge (e, dest);
3905
3906 /* We don't want simplejumps in the insn stream during cfglayout. */
341c100f 3907 gcc_assert (!simplejump_p (BB_END (src)));
9ee634e3 3908
6fb5fa3c 3909 df_set_bb_dirty (src);
3910 return ret;
3911}
3912
3913/* Simple wrapper, as we can always redirect fallthru edges. */
3914static basic_block
d329e058 3915cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 3916{
3917 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
3918
3919 gcc_assert (redirected);
3920 return NULL;
3921}
3922
3923/* Same as delete_basic_block but update cfg_layout structures. */
3924
9ee634e3 3925static void
d329e058 3926cfg_layout_delete_block (basic_block bb)
9ee634e3 3927{
a813c111 3928 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
9ee634e3 3929
bcc708fc 3930 if (BB_HEADER (bb))
9ee634e3 3931 {
a813c111 3932 next = BB_HEAD (bb);
9ee634e3 3933 if (prev)
bcc708fc 3934 NEXT_INSN (prev) = BB_HEADER (bb);
9ee634e3 3935 else
3936 set_first_insn (BB_HEADER (bb));
3937 PREV_INSN (BB_HEADER (bb)) = prev;
3938 insn = BB_HEADER (bb);
3939 while (NEXT_INSN (insn))
3940 insn = NEXT_INSN (insn);
3941 NEXT_INSN (insn) = next;
3942 PREV_INSN (next) = insn;
3943 }
a813c111 3944 next = NEXT_INSN (BB_END (bb));
bcc708fc 3945 if (BB_FOOTER (bb))
9ee634e3 3946 {
bcc708fc 3947 insn = BB_FOOTER (bb);
3948 while (insn)
3949 {
4b4bf941 3950 if (BARRIER_P (insn))
3951 {
3952 if (PREV_INSN (insn))
3953 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3954 else
bcc708fc 3955 BB_FOOTER (bb) = NEXT_INSN (insn);
3956 if (NEXT_INSN (insn))
3957 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3958 }
4b4bf941 3959 if (LABEL_P (insn))
3960 break;
3961 insn = NEXT_INSN (insn);
3962 }
bcc708fc 3963 if (BB_FOOTER (bb))
bc35512f 3964 {
a813c111 3965 insn = BB_END (bb);
3966 NEXT_INSN (insn) = BB_FOOTER (bb);
3967 PREV_INSN (BB_FOOTER (bb)) = insn;
3968 while (NEXT_INSN (insn))
3969 insn = NEXT_INSN (insn);
3970 NEXT_INSN (insn) = next;
3971 if (next)
3972 PREV_INSN (next) = insn;
3973 else
3974 set_last_insn (insn);
3975 }
3976 }
3977 if (bb->next_bb != EXIT_BLOCK_PTR)
bcc708fc 3978 to = &BB_HEADER (bb->next_bb);
3979 else
3980 to = &cfg_layout_function_footer;
997de8ed 3981
3982 rtl_delete_block (bb);
3983
3984 if (prev)
3985 prev = NEXT_INSN (prev);
d329e058 3986 else
3987 prev = get_insns ();
3988 if (next)
3989 next = PREV_INSN (next);
d329e058 3990 else
3991 next = get_last_insn ();
3992
3993 if (next && NEXT_INSN (next) != prev)
3994 {
3995 remaints = unlink_insn_chain (prev, next);
3996 insn = remaints;
3997 while (NEXT_INSN (insn))
3998 insn = NEXT_INSN (insn);
3999 NEXT_INSN (insn) = *to;
4000 if (*to)
4001 PREV_INSN (*to) = insn;
4002 *to = remaints;
4003 }
4004}
4005
beb235f8 4006/* Return true when blocks A and B can be safely merged. */
b48d0358 4007
bc35512f 4008static bool
b48d0358 4009cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4010{
4011 /* If we are partitioning hot/cold basic blocks, we don't want to
4012 mess up unconditional or indirect jumps that cross between hot
4013 and cold sections.
4014
8e8d5162 4015 Basic block partitioning may result in some jumps that appear to
4016 be optimizable (or blocks that appear to be mergeable), but which really
4017 must be left untouched (they are required to make it safely across
4018 partition boundaries). See the comments at the top of
4019 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4020
87c8b4be 4021 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4022 return false;
750054a2 4023
4024 /* Protect the loop latches. */
4025 if (current_loops && b->loop_father->latch == b)
4026 return false;
4027
4028 /* If we would end up moving B's instructions, make sure it doesn't fall
4029 through into the exit block, since we cannot recover from a fallthrough
4030 edge into the exit block occurring in the middle of a function. */
4031 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4032 {
4033 edge e = find_fallthru_edge (b->succs);
4034 if (e && e->dest == EXIT_BLOCK_PTR)
4035 return false;
4036 }
4037
bc35512f 4038 /* There must be exactly one edge in between the blocks. */
4039 return (single_succ_p (a)
4040 && single_succ (a) == b
4041 && single_pred_p (b) == 1
628f6a4e 4042 && a != b
bc35512f 4043 /* Must be simple edge. */
c5cbcccf 4044 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
bc35512f 4045 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4046 /* If the jump insn has side effects, we can't kill the edge.
4047 When not optimizing, try_redirect_by_replacing_jump will
4048 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4049 && (!JUMP_P (BB_END (a))
49ea3702 4050 || ((!optimize || reload_completed)
a813c111 4051 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4052}
4053
4054/* Merge blocks A and B. The blocks must be mergeable. */
4055
4056static void
4057cfg_layout_merge_blocks (basic_block a, basic_block b)
4058{
50a36e42 4059 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
f9df6f16 4060 rtx insn;
50a36e42 4061
77a74ed7 4062 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
bc35512f 4063
6fb5fa3c 4064 if (dump_file)
4065 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4066 a->index);
6fb5fa3c 4067
bc35512f 4068 /* If there was a CODE_LABEL beginning B, delete it. */
4b4bf941 4069 if (LABEL_P (BB_HEAD (b)))
2c97f8e4 4070 {
4071 delete_insn (BB_HEAD (b));
4072 }
4073
4074 /* We should have a fallthru edge in A, or we can do a dummy redirection to get
4075 it cleaned up. */
4b4bf941 4076 if (JUMP_P (BB_END (a)))
628f6a4e 4077 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
341c100f 4078 gcc_assert (!JUMP_P (BB_END (a)));
bc35512f 4079
9be94227 4080 /* When not optimizing CFG and the edge is the only place in RTL which holds
7241571e 4081 some unique locus, emit a nop with that locus in between. */
4082 if (!optimize)
4083 emit_nop_for_unique_locus_between (a, b);
7241571e 4084
bc35512f 4085 /* Possible line number notes should appear in between. */
bcc708fc 4086 if (BB_HEADER (b))
bc35512f 4087 {
a813c111 4088 rtx first = BB_END (a), last;
bc35512f 4089
bcc708fc 4090 last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
4091 /* The above might add a BARRIER as BB_END, but as barriers
4092 aren't valid parts of a bb, remove_insn doesn't update
4093 BB_END if it is a barrier. So adjust BB_END here. */
4094 while (BB_END (a) != first && BARRIER_P (BB_END (a)))
4095 BB_END (a) = PREV_INSN (BB_END (a));
a7b87f73 4096 delete_insn_chain (NEXT_INSN (first), last, false);
bcc708fc 4097 BB_HEADER (b) = NULL;
4098 }
4099
4100 /* In case the basic blocks are not adjacent, move them around. */
a813c111 4101 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
bc35512f 4102 {
f9df6f16 4103 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
6c3d0e31 4104
f9df6f16 4105 emit_insn_after_noloc (insn, BB_END (a), a);
4106 }
4107 /* Otherwise just re-associate the instructions. */
4108 else
4109 {
a813c111 4110 insn = BB_HEAD (b);
a813c111 4111 BB_END (a) = BB_END (b);
4112 }
4113
4114 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4115 We need to call it explicitly. */
4116 update_bb_for_insn_chain (insn, BB_END (b), a);
4117
4118 /* Skip possible DELETED_LABEL insn. */
4119 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4120 insn = NEXT_INSN (insn);
4121 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4122 BB_HEAD (b) = NULL;
4123 delete_insn (insn);
4124
4125 df_bb_delete (b->index);
4126
bc35512f 4127 /* Possible tablejumps and barriers should appear after the block. */
bcc708fc 4128 if (BB_FOOTER (b))
bc35512f 4129 {
4130 if (!BB_FOOTER (a))
4131 BB_FOOTER (a) = BB_FOOTER (b);
4132 else
4133 {
bcc708fc 4134 rtx last = BB_FOOTER (a);
4135
4136 while (NEXT_INSN (last))
4137 last = NEXT_INSN (last);
4138 NEXT_INSN (last) = BB_FOOTER (b);
4139 PREV_INSN (BB_FOOTER (b)) = last;
bc35512f 4140 }
bcc708fc 4141 BB_FOOTER (b) = NULL;
4142 }
4143
50a36e42 4144 /* If B was a forwarder block, propagate the locus on the edge. */
2f13f2de 4145 if (forwarder_p
fbc68f2a 4146 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
4147 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4148
c263766c 4149 if (dump_file)
50a36e42 4150 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4151}
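/* Sketch of the contract between the two functions above (hypothetical
   helper, compiled out): merging may only be attempted once the predicate
   has accepted the pair, which is what the gcc_checking_assert in
   cfg_layout_merge_blocks enforces.  */
#if 0
static bool
example_try_merge (basic_block a, basic_block b)
{
  if (!cfg_layout_can_merge_blocks_p (a, b))
    return false;
  cfg_layout_merge_blocks (a, b);
  return true;
}
#endif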
4152
4153/* Split edge E. */
f470c378 4154
4155static basic_block
4156cfg_layout_split_edge (edge e)
4157{
4158 basic_block new_bb =
4159 create_basic_block (e->src != ENTRY_BLOCK_PTR
a813c111 4160 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4161 NULL_RTX, e->src);
4162
4163 if (e->dest == EXIT_BLOCK_PTR)
4164 BB_COPY_PARTITION (new_bb, e->src);
4165 else
4166 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4167 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4168 redirect_edge_and_branch_force (e, new_bb);
4169
4170 return new_bb;
4171}
4172
4173/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4174
4175static void
4176rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4177{
4178}
4179
4180/* Return true if BB contains only labels or non-executable
4181 instructions. */
4182
4183static bool
4184rtl_block_empty_p (basic_block bb)
4185{
4186 rtx insn;
4187
4188 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4189 return true;
4190
4191 FOR_BB_INSNS (bb, insn)
4192 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4193 return false;
4194
4195 return true;
4196}
4197
4198/* Split a basic block if it ends with a conditional branch and if
4199 the other part of the block is not empty. */
4200
4201static basic_block
4202rtl_split_block_before_cond_jump (basic_block bb)
4203{
4204 rtx insn;
4205 rtx split_point = NULL;
4206 rtx last = NULL;
4207 bool found_code = false;
4208
4209 FOR_BB_INSNS (bb, insn)
4210 {
4211 if (any_condjump_p (insn))
4212 split_point = last;
4213 else if (NONDEBUG_INSN_P (insn))
4214 found_code = true;
4215 last = insn;
4216 }
4217
4218 /* Split only if we found both a condjump and some other real code. */
4219 if (found_code && split_point)
4220 return split_block (bb, split_point)->dest;
4221 else
4222 return NULL;
4223}
4224
4225/* Return 1 if BB ends with a call, possibly followed by some
4226 instructions that must stay with the call, 0 otherwise. */
4227
4228static bool
b48d0358 4229rtl_block_ends_with_call_p (basic_block bb)
4230{
4231 rtx insn = BB_END (bb);
4232
4b4bf941 4233 while (!CALL_P (insn)
6de9cd9a 4234 && insn != BB_HEAD (bb)
92ddef69 4235 && (keep_with_call_p (insn)
4236 || NOTE_P (insn)
4237 || DEBUG_INSN_P (insn)))
6de9cd9a 4238 insn = PREV_INSN (insn);
4b4bf941 4239 return (CALL_P (insn));
4240}
4241
4242/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4243
4244static bool
9678086d 4245rtl_block_ends_with_condjump_p (const_basic_block bb)
4246{
4247 return any_condjump_p (BB_END (bb));
4248}
4249
4250/* Return true if we need to add a fake edge to the exit block.
4251 Helper function for rtl_flow_call_edges_add. */
4252
4253static bool
9678086d 4254need_fake_edge_p (const_rtx insn)
4255{
4256 if (!INSN_P (insn))
4257 return false;
4258
4b4bf941 4259 if ((CALL_P (insn)
4260 && !SIBLING_CALL_P (insn)
4261 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4262 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4263 return true;
4264
4265 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4266 && MEM_VOLATILE_P (PATTERN (insn)))
4267 || (GET_CODE (PATTERN (insn)) == PARALLEL
4268 && asm_noperands (insn) != -1
4269 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4270 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4271}
4272
4273/* Add fake edges to the function exit for any non-constant and non-noreturn
4274 calls, or volatile inline assembly, in the bitmap of blocks specified by
4275 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number of blocks
4276 that were split.
4277
4278 The goal is to expose cases in which entering a basic block does not imply
4279 that all subsequent instructions must be executed. */
4280
4281static int
4282rtl_flow_call_edges_add (sbitmap blocks)
4283{
4284 int i;
4285 int blocks_split = 0;
4286 int last_bb = last_basic_block;
4287 bool check_last_block = false;
4288
24bd1a0b 4289 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4290 return 0;
4291
4292 if (! blocks)
4293 check_last_block = true;
4294 else
d7c028c0 4295 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4296
4297 /* In the last basic block, before epilogue generation, there will be
4298 a fallthru edge to EXIT. Special care is required if the last insn
4299 of the last basic block is a call because make_edge folds duplicate
4300 edges, which would result in the fallthru edge also being marked
4301 fake, which would result in the fallthru edge being removed by
4302 remove_fake_edges, which would result in an invalid CFG.
4303
4304 Moreover, we can't elide the outgoing fake edge, since the block
4305 profiler needs to take this into account in order to solve the minimal
4306 spanning tree in the case that the call doesn't return.
4307
4308 Handle this by adding a dummy instruction in a new last basic block. */
4309 if (check_last_block)
4310 {
4311 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4312 rtx insn = BB_END (bb);
4313
4314 /* Back up past insns that must be kept in the same block as a call. */
4315 while (insn != BB_HEAD (bb)
4316 && keep_with_call_p (insn))
4317 insn = PREV_INSN (insn);
4318
4319 if (need_fake_edge_p (insn))
4320 {
4321 edge e;
4322
4323 e = find_edge (bb, EXIT_BLOCK_PTR);
4324 if (e)
4325 {
c41c1387 4326 insert_insn_on_edge (gen_use (const0_rtx), e);
4327 commit_edge_insertions ();
4328 }
4329 }
4330 }
4331
4332 /* Now add fake edges to the function exit for any non-constant
4333 calls, since there is no way that we can determine whether they will
4334 return or not... */
4335
24bd1a0b 4336 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4337 {
4338 basic_block bb = BASIC_BLOCK (i);
4339 rtx insn;
4340 rtx prev_insn;
4341
4342 if (!bb)
4343 continue;
4344
d7c028c0 4345 if (blocks && !bitmap_bit_p (blocks, i))
4346 continue;
4347
4348 for (insn = BB_END (bb); ; insn = prev_insn)
4349 {
4350 prev_insn = PREV_INSN (insn);
4351 if (need_fake_edge_p (insn))
4352 {
4353 edge e;
4354 rtx split_at_insn = insn;
4355
4356 /* Don't split the block between a call and an insn that should
c22cacf3 4357 remain in the same block as the call. */
4b4bf941 4358 if (CALL_P (insn))
4359 while (split_at_insn != BB_END (bb)
4360 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4361 split_at_insn = NEXT_INSN (split_at_insn);
4362
4363 /* The handling above of the final block before the epilogue
c22cacf3 4364 should be enough to verify that there is no edge to the exit
4365 block in the CFG already. Calling make_edge in such a case would
4366 cause us to mark that edge as fake and remove it later. */
4367
4368#ifdef ENABLE_CHECKING
4369 if (split_at_insn == BB_END (bb))
628f6a4e 4370 {
4371 e = find_edge (bb, EXIT_BLOCK_PTR);
4372 gcc_assert (e == NULL);
628f6a4e 4373 }
4374#endif
4375
4376 /* Note that the following may create a new basic block
4377 and renumber the existing basic blocks. */
4378 if (split_at_insn != BB_END (bb))
4379 {
4380 e = split_block (bb, split_at_insn);
4381 if (e)
4382 blocks_split++;
4383 }
4384
4385 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4386 }
4387
4388 if (insn == BB_HEAD (bb))
4389 break;
4390 }
4391 }
4392
4393 if (blocks_split)
4394 verify_flow_info ();
4395
4396 return blocks_split;
4397}
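/* Hypothetical caller sketch (compiled out): passing a NULL bitmap asks for
   fake exit edges across the whole CFG, and the return value reports how
   many blocks had to be split in the process.  */
#if 0
static void
example_add_fake_exit_edges (void)
{
  int n_split = rtl_flow_call_edges_add (NULL);

  if (dump_file && n_split)
    fprintf (dump_file, "Split %d blocks while adding fake exit edges\n",
	     n_split);
}
#endif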
4398
1cb7dfc3 4399/* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
315682fb 4400 the conditional branch target, SECOND_HEAD should be the fall-thru;
4401 there is no need to handle that here, the loop versioning code handles
4402 it. The reason for SECOND_HEAD is that it is needed for the condition
4403 in trees, and this hook should have the same signature as the tree one. */
4404static void
4405rtl_lv_add_condition_to_bb (basic_block first_head,
4406 basic_block second_head ATTRIBUTE_UNUSED,
4407 basic_block cond_bb, void *comp_rtx)
4408{
4409 rtx label, seq, jump;
4410 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4411 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4412 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4413 enum machine_mode mode;
4414
4415
4416 label = block_label (first_head);
4417 mode = GET_MODE (op0);
4418 if (mode == VOIDmode)
4419 mode = GET_MODE (op1);
4420
4421 start_sequence ();
4422 op0 = force_operand (op0, NULL_RTX);
4423 op1 = force_operand (op1, NULL_RTX);
4424 do_compare_rtx_and_jump (op0, op1, comp, 0,
40e90eac 4425 mode, NULL_RTX, NULL_RTX, label, -1);
4426 jump = get_last_insn ();
4427 JUMP_LABEL (jump) = label;
4428 LABEL_NUSES (label)++;
4429 seq = get_insns ();
4430 end_sequence ();
4431
4432 /* Add the new cond in the new head. */
4433 emit_insn_after (seq, BB_END (cond_bb));
4434}
4435
4436
4437/* Given a block B with a conditional branch at its end, store the
4438 branch edge and the fall-thru edge in
4439 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4440static void
4441rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4442 edge *fallthru_edge)
4443{
4444 edge e = EDGE_SUCC (b, 0);
4445
4446 if (e->flags & EDGE_FALLTHRU)
4447 {
4448 *fallthru_edge = e;
4449 *branch_edge = EDGE_SUCC (b, 1);
4450 }
4451 else
4452 {
4453 *branch_edge = e;
4454 *fallthru_edge = EDGE_SUCC (b, 1);
4455 }
4456}
4457
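/* Illustrative sketch (hypothetical helper, compiled out): on a block that
   ends in a conditional jump, the extractor above hands back both successor
   edges without the caller having to test EDGE_FALLTHRU itself.  */
#if 0
static void
example_use_cond_bb_edges (basic_block b)
{
  edge branch_e, fallthru_e;

  rtl_extract_cond_bb_edges (b, &branch_e, &fallthru_e);
  /* branch_e->dest is the jump target; fallthru_e->dest is reached when
     the condition is not taken.  */
}
#endif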
4458void
4459init_rtl_bb_info (basic_block bb)
4460{
4461 gcc_assert (!bb->il.x.rtl);
4462 bb->il.x.head_ = NULL;
4463 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
4464}
4465
4466/* Returns true if it is possible to remove edge E by redirecting
4467 it to the destination of the other edge from E->src. */
4468
4469static bool
9678086d 4470rtl_can_remove_branch_p (const_edge e)
14fa2cc0 4471{
4472 const_basic_block src = e->src;
4473 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4474 const_rtx insn = BB_END (src), set;
4475
4476 /* The conditions are taken from try_redirect_by_replacing_jump. */
4477 if (target == EXIT_BLOCK_PTR)
4478 return false;
4479
4480 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4481 return false;
4482
4483 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
4484 || BB_PARTITION (src) != BB_PARTITION (target))
4485 return false;
4486
4487 if (!onlyjump_p (insn)
4488 || tablejump_p (insn, NULL, NULL))
4489 return false;
4490
4491 set = single_set (insn);
4492 if (!set || side_effects_p (set))
4493 return false;
4494
4495 return true;
4496}
4497
4498static basic_block
4499rtl_duplicate_bb (basic_block bb)
4500{
4501 bb = cfg_layout_duplicate_bb (bb);
4502 bb->aux = NULL;
4503 return bb;
4504}
4505
4506/* Do book-keeping of basic block BB for the profile consistency checker.
4507 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4508 then do post-pass accounting. Store the counts in RECORD. */
4509static void
4510rtl_account_profile_record (basic_block bb, int after_pass,
4511 struct profile_record *record)
4512{
4513 rtx insn;
4514 FOR_BB_INSNS (bb, insn)
4515 if (INSN_P (insn))
4516 {
4517 record->size[after_pass]
4518 += insn_rtx_cost (PATTERN (insn), false);
4519 if (profile_status == PROFILE_READ)
4520 record->time[after_pass]
4521 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4522 else if (profile_status == PROFILE_GUESSED)
4523 record->time[after_pass]
4524 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
4525 }
4526}
4527
4528/* Implementation of CFG manipulation for linearized RTL. */
4529struct cfg_hooks rtl_cfg_hooks = {
f470c378 4530 "rtl",
9ee634e3 4531 rtl_verify_flow_info,
10e9fecc 4532 rtl_dump_bb,
2c895bd1 4533 rtl_dump_bb_for_graph,
bc35512f 4534 rtl_create_basic_block,
4535 rtl_redirect_edge_and_branch,
4536 rtl_redirect_edge_and_branch_force,
14fa2cc0 4537 rtl_can_remove_branch_p,
4538 rtl_delete_block,
4539 rtl_split_block,
f470c378 4540 rtl_move_block_after,
4541 rtl_can_merge_blocks, /* can_merge_blocks_p */
4542 rtl_merge_blocks,
4543 rtl_predict_edge,
4544 rtl_predicted_by_p,
4545 cfg_layout_can_duplicate_bb_p,
4546 rtl_duplicate_bb,
4547 rtl_split_edge,
4548 rtl_make_forwarder_block,
6de9cd9a 4549 rtl_tidy_fallthru_edge,
cf103ca4 4550 rtl_force_nonfallthru,
4551 rtl_block_ends_with_call_p,
4552 rtl_block_ends_with_condjump_p,
4553 rtl_flow_call_edges_add,
4554 NULL, /* execute_on_growing_pred */
4555 NULL, /* execute_on_shrinking_pred */
4556 NULL, /* duplicate loop for trees */
4557 NULL, /* lv_add_condition_to_bb */
4558 NULL, /* lv_adjust_loop_header_phi*/
4559 NULL, /* extract_cond_bb_edges */
4560 NULL, /* flush_pending_stmts */
4561 rtl_block_empty_p, /* block_empty_p */
4562 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
aa4723d7 4563 rtl_account_profile_record,
4564};
4565
4566/* Implementation of CFG manipulation for cfg layout RTL, where
4567 basic blocks connected via fallthru edges do not have to be adjacent.
4568 This representation will hopefully become the default one in a future
4569 version of the compiler. */
6de9cd9a 4570
9ee634e3 4571struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
f470c378 4572 "cfglayout mode",
bc35512f 4573 rtl_verify_flow_info_1,
10e9fecc 4574 rtl_dump_bb,
2c895bd1 4575 rtl_dump_bb_for_graph,
bc35512f 4576 cfg_layout_create_basic_block,
4577 cfg_layout_redirect_edge_and_branch,
4578 cfg_layout_redirect_edge_and_branch_force,
14fa2cc0 4579 rtl_can_remove_branch_p,
4580 cfg_layout_delete_block,
4581 cfg_layout_split_block,
f470c378 4582 rtl_move_block_after,
4583 cfg_layout_can_merge_blocks_p,
4584 cfg_layout_merge_blocks,
4585 rtl_predict_edge,
4586 rtl_predicted_by_p,
4587 cfg_layout_can_duplicate_bb_p,
4588 cfg_layout_duplicate_bb,
4589 cfg_layout_split_edge,
4590 rtl_make_forwarder_block,
4591 NULL, /* tidy_fallthru_edge */
4592 rtl_force_nonfallthru,
4593 rtl_block_ends_with_call_p,
4594 rtl_block_ends_with_condjump_p,
4595 rtl_flow_call_edges_add,
4596 NULL, /* execute_on_growing_pred */
4597 NULL, /* execute_on_shrinking_pred */
4598 duplicate_loop_to_header_edge, /* duplicate loop for trees */
4599 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4600 NULL, /* lv_adjust_loop_header_phi*/
4601 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
4602 NULL, /* flush_pending_stmts */
4603 rtl_block_empty_p, /* block_empty_p */
4604 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
aa4723d7 4605 rtl_account_profile_record,
9ee634e3 4606};
4607
4608#include "gt-cfgrtl.h"