]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
Add new file gcc/testsuite/jit.dg/test-constants.c erroneously omitted from r219401
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
ca6c03ca 1/* Control flow graph manipulation code for GNU compiler.
5624e564 2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
ca6c03ca
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca6c03ca
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca6c03ca 19
5f0d2358
RK
20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
ca6c03ca
JH
22
23 Available functionality:
bc35512f 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5f0d2358 25 - CFG-aware instruction chain manipulation
ca6c03ca 26 delete_insn, delete_insn_chain
bc35512f
JH
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
ba5e9aca
EB
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
bc35512f
JH
33
34 Functions not supposed for generic use:
5f0d2358 35 - Infrastructure to determine quickly basic block for insn
ca6c03ca 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5f0d2358 37 - Edge redirection with updating and optimizing of insn chain
bc35512f 38 block_label, tidy_fallthru_edge, force_nonfallthru */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6
ZW
42#include "coretypes.h"
43#include "tm.h"
40e23961
MC
44#include "hash-set.h"
45#include "machmode.h"
46#include "vec.h"
47#include "double-int.h"
48#include "input.h"
49#include "alias.h"
50#include "symtab.h"
51#include "wide-int.h"
52#include "inchash.h"
ca6c03ca 53#include "tree.h"
ca6c03ca 54#include "hard-reg-set.h"
60393bbc 55#include "predict.h"
83685514 56#include "hashtab.h"
ca6c03ca 57#include "function.h"
60393bbc
AM
58#include "dominance.h"
59#include "cfg.h"
60#include "cfgrtl.h"
61#include "cfganal.h"
62#include "cfgbuild.h"
63#include "cfgcleanup.h"
64#include "basic-block.h"
65#include "bb-reorder.h"
66#include "regs.h"
67#include "flags.h"
ca6c03ca 68#include "except.h"
0cbd9993 69#include "rtl-error.h"
ca6c03ca
JH
70#include "tm_p.h"
71#include "obstack.h"
2a34bece 72#include "insn-attr.h"
0a2ed1f1 73#include "insn-config.h"
ff25ef99 74#include "expr.h"
9fb32434 75#include "target.h"
677f3fa8 76#include "common/common-target.h"
1cb7dfc3 77#include "cfgloop.h"
5e2d947c 78#include "ggc.h"
ef330312 79#include "tree-pass.h"
6fb5fa3c 80#include "df.h"
ca6c03ca 81
78bde837
SB
/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

/* Forward declarations for the cfglayout helpers defined later in this
   file.  */
static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
110\f
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    /* These three kinds carry no pairing constraint and are safe to drop.  */
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}
128
/* True if a given label can be deleted.  A label is deletable only when it
   is not marked as preserved, has no user-visible name, and is not listed
   in the forced_labels chain (labels whose address was taken).  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label));
}
139
/* Delete INSN by patching it out.

   Labels that cannot really be removed are instead turned into
   NOTE_INSN_DELETED_LABEL notes; everything else is unlinked from the insn
   chain.  Use counts of referenced labels (via JUMP_LABEL, REG_LABEL_TARGET,
   REG_LABEL_OPERAND notes and jump tables) are decremented accordingly.  */

void
delete_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.
	     This keeps the deleted-label note outside the block proper.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  /* For a jump table, drop the use count of every label it targets.  */
  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
236
/* Like delete_insn but also purge dead edges from BB.

   The edge purge is only needed when INSN ends its block, since only then
   can its removal change the block's outgoing edges.  */

void
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  /* Decide before deleting: deletion may clear the insn's block info.  */
  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
}
252
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx_insn *prev, *current;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  current = safe_as_a <rtx_insn *> (finish);
  /* Walk backwards from FINISH to START so deletion doesn't disturb the
     not-yet-visited part of the chain.  */
  while (1)
    {
      prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      /* Notes kept above (and labels demoted to deleted-label notes)
	 survive; optionally detach them from their basic block.  */
      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
282\f
5f0d2358
RK
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes after the label that heads the block.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the bb note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
363
/* Create new basic block consisting of instructions in between HEAD and END
   and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create basic block at the end of INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  /* The void* signature is dictated by the cfg_hooks interface.  */
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed (by ~25% each time).  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}
bc35512f
JH
392
/* cfglayout-mode hook for block creation; delegates to the RTL-mode
   implementation.  */

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}
ca6c03ca
JH
400\f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */
/* NOTE(review): the comment above is stale — the function returns void;
   confirm against history before relying on it.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
430\f
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      /* Walk the block's insns inclusively from head to end.  */
      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}
451
/* Release the basic_block_for_insn array.  Returns 0 so it can be used
   directly as a pass execute callback.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  /* Barriers live between blocks; their block field is left alone.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}
463
27a4cd48
DM
namespace {

/* Pass descriptor for the *free_cfg pass, which tears down the CFG
   (PROP_cfg is listed in properties_destroyed).  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  /* Record the hot/cold section boundary before block info goes away.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

/* Factory function exposed to the pass manager.  */
rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
518
/* Return RTX to emit after when we want to emit code on the entry of function.
   Falls back to the start of the insn chain when only the fixed
   (entry/exit) blocks exist.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}
526
11b904a1
BS
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  The insn is placed on the entry block's
   single fall-through successor edge and committed immediately.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
539
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  /* Advance END so the loop below covers it inclusively.  */
  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}
6c3d0e31
SP
555
/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
564
532aafad
SB
565\f
/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}
587
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  /* Entry/exit blocks and multi-successor blocks never qualify.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb))
    return false;

  /* Every insn before the block's last must be inactive ...  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* ... and the last insn may only be a simple jump or inactive.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
608
/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      /* A block whose single successor is a loop header is a preheader
	 (or latch); don't treat it as a forwarder either.  */
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
631
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fall-through requires physical adjacency in the block chain.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fall-through edge to the exit block precludes another
     fall-through out of SRC.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* TARGET's first active insn must immediately follow SRC's end.  */
  return next_active_insn (insn) == insn2;
}
663
/* Return nonzero if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return 0;
  return true;
}
ca6c03ca 681\f
/* Return the NOTE_INSN_BASIC_BLOCK of BB.  The note is either the block's
   head or the insn right after a heading label; anything else asserts.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}
695
6fb5fa3c
DB
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  Returns NULL for an empty block.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
715
2b9c63a2
FY
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  If INSNP is NULL the
   split point defaults to just after the bb note (i.e. the new block
   gets the whole body), and the new block inherits BB's outgoing edges
   and partition.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
777
9be94227
EB
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  /* If A's last located insn already carries the edge's locus, the locus
     is not unique to the edge.  */
  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
814
/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.
   The nop becomes A's new block end.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}
827
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.

   B's heading label and bb note are deleted (together with A's trailing
   jump, if any), B's insns are reassociated with A, and B's dataflow info
   is removed.  Trailing debug insns of B are handled specially so they
   end up inside A even when B is otherwise empty.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Step b_end back over B's trailing debug insns; b_debug_start ends up
     at the first of them.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
bc35512f 949
6fb5fa3c 950
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
ca6c03ca 989\f
5f0d2358
RK
990/* Return the label in the head of basic block BLOCK. Create one if it doesn't
991 exist. */
ca6c03ca
JH
992
993rtx
d329e058 994block_label (basic_block block)
ca6c03ca 995{
fefa31b5 996 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
ca6c03ca 997 return NULL_RTX;
5f0d2358 998
4b4bf941 999 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 1000 {
1130d5e3 1001 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 1002 }
5f0d2358 1003
a813c111 1004 return BB_HEAD (block);
ca6c03ca
JH
1005}
1006
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.

   E is the edge to redirect, TARGET the desired destination, and
   IN_CFGLAYOUT says whether we are in cfglayout mode (where barriers
   and fallthru bookkeeping differ).  Returns the surviving edge on
   success, NULL when no simplification was possible.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* The block must end in exactly one jump with no other effects.  */
  if (!onlyjump_p (insn))
    return NULL;
  /* Keep tablejumps intact when not optimizing or after reload.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* In cfglayout mode the barrier lives in the block's footer;
	     unlink it there but preserve any jump table.  */
	  rtx_insn *insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* redirect_jump may only fail when retargeting to the exit
	     block (block_label returned NULL_RTX there).  */
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx_insn *barrier;
      rtx label;
      rtx_jump_table_data *table;

      /* Emit the replacement jump first, then delete the old one, so
	 the label's use count never drops to zero in between.  */
      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Splice the jump out of its current position ...  */
	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ... and relink it immediately before the barrier.  */
	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The single remaining edge now carries the whole block's weight.  */
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1186
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.

   Three kinds of jumps are handled: tablejumps (rewrite the vector and
   any casesi dispatch), asm gotos (rewrite the operand labels and the
   REG_LABEL_* notes), and ordinary jumps (redirect_jump).  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      /* Harmless even for the exit block: block_label returns NULL_RTX
	 there and we bail out before using it.  */
      rtx new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      /* Retarget every vector entry that referenced OLD_LABEL, keeping
	 the label use counts balanced.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* asm goto: retarget the matching operand labels.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      /* Keep JUMP_LABEL and the REG_LABEL_TARGET notes consistent:
	 whichever of them does not hold the label must carry the note.  */
      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Also rewrite any REG_LABEL_OPERAND notes for the old label.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1299
1300
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure.

   Patches the jump(s) at the end of E->src so they target TARGET instead
   of E->dest, then updates the edge itself.  Fallthru edges carry no jump
   and cannot be handled here.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      /* The finished CFG has one jump per block; patch just it.  */
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1338
3371a64f
TJ
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.

   Sets or clears EDGE_CROSSING on E to match the hot/cold partitions of
   its endpoints, and keeps CROSSING_JUMP_P on the source block's final
   jump in sync.  Edges touching the entry/exit blocks are ignored.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src))
	  && !CROSSING_JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}
1382
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}
1419
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  /* Abnormal call and EH edges cannot be redirected here.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  /* First try the cheap simplification: replace or remove the jump.  */
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  /* Otherwise patch the jump instruction itself.  */
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
1459
3371a64f
TJ
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      /* In cfglayout mode barriers do not live in the main insn stream;
	 move the freshly emitted one into BB's footer chain.  */
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  /* Append at the end of the existing footer, unless the footer
	     already ends in a barrier (then this one is redundant).  */
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
1488
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.

   Returns the newly created basic block holding the jump, or NULL when
   no new block was needed (the jump was added to E's source).  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      /* Split the branch probability between the retargeted conditional
	 jump edge and the remaining fallthru edge.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
	  /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget asm labels that referenced the old fallthru head.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  /* Keep JUMP_LABEL and the REG_LABEL_* notes of the asm goto
	     consistent with the retargeted labels (mirrors the logic in
	     patch_jump_insn).  */
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Jumps to the exit block are return jumps; JUMP_LABEL selects
	 which flavor.  */
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1739
1740/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1741 (and possibly create new basic block) to make edge non-fallthru.
1742 Return newly created BB or NULL if none. */
5f0d2358 1743
cf103ca4
EB
1744static basic_block
1745rtl_force_nonfallthru (edge e)
ca6c03ca 1746{
26898771 1747 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
ca6c03ca
JH
1748}
1749
1750/* Redirect edge even at the expense of creating new jump insn or
1751 basic block. Return new basic block if created, NULL otherwise.
41806d92 1752 Conversion must be possible. */
ca6c03ca 1753
9ee634e3 1754static basic_block
d329e058 1755rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1756{
5f0d2358
RK
1757 if (redirect_edge_and_branch (e, target)
1758 || e->dest == target)
ca6c03ca
JH
1759 return NULL;
1760
1761 /* In case the edge redirection failed, try to force it to be non-fallthru
1762 and redirect newly created simplejump. */
6fb5fa3c 1763 df_set_bb_dirty (e->src);
26898771 1764 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1765}
1766
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Back up so the jump itself is included in the deleted range.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
ca6c03ca 1815\f
f470c378
ZD
1816/* Should move basic block BB after basic block AFTER. NIY. */
1817
1818static bool
1819rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1820 basic_block after ATTRIBUTE_UNUSED)
1821{
1822 return false;
1823}
1824
3371a64f
TJ
1825/* Locate the last bb in the same partition as START_BB. */
1826
1827static basic_block
1828last_bb_in_partition (basic_block start_bb)
1829{
1830 basic_block bb;
fefa31b5 1831 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3371a64f
TJ
1832 {
1833 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1834 return bb;
1835 }
6626665f 1836 /* Return bb before the exit block. */
3371a64f
TJ
1837 return bb->prev_bb;
1838}
1839
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   The new block is created as a fallthru predecessor of the original
   destination, placed appropriately in the bb chain (respecting
   hot/cold partition boundaries), and the incoming edge is redirected
   to it.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below). Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = NEXT_INSN (BB_END (after));
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot. The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru should not have to create a new block here,
	 since BB was just created as DEST's fallthru predecessor.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1954
1955/* Queue instructions for insertion on an edge between two basic blocks.
1956 The new instructions and basic blocks (if any) will not appear in the
1957 CFG until commit_edge_insertions is called. */
1958
1959void
d329e058 1960insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1961{
1962 /* We cannot insert instructions on an abnormal critical edge.
1963 It will be easier to find the culprit if we die now. */
341c100f 1964 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1965
6de9cd9a 1966 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1967 start_sequence ();
1968 else
6de9cd9a 1969 push_to_sequence (e->insns.r);
ca6c03ca
JH
1970
1971 emit_insn (pattern);
1972
6de9cd9a 1973 e->insns.r = get_insns ();
ca6c03ca
JH
1974 end_sequence ();
1975}
1976
/* Update the CFG for the instructions queued on edge E: find a safe
   insertion point (destination head, source end, or a freshly split
   block on the edge) and emit the queued sequence there.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
2085
2086/* Update the CFG for all queued instructions. */
2087
2088void
d329e058 2089commit_edge_insertions (void)
ca6c03ca 2090{
ca6c03ca
JH
2091 basic_block bb;
2092
600b5b1d
TJ
2093 /* Optimization passes that invoke this routine can cause hot blocks
2094 previously reached by both hot and cold blocks to become dominated only
2095 by cold blocks. This will cause the verification below to fail,
2096 and lead to now cold code in the hot section. In some cases this
2097 may only be visible after newly unreachable blocks are deleted,
2098 which will be done by fixup_partitions. */
2099 fixup_partitions ();
2100
ca6c03ca
JH
2101#ifdef ENABLE_CHECKING
2102 verify_flow_info ();
2103#endif
2104
fefa31b5
DM
2105 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2106 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
ca6c03ca 2107 {
628f6a4e
BE
2108 edge e;
2109 edge_iterator ei;
ca6c03ca 2110
628f6a4e
BE
2111 FOR_EACH_EDGE (e, ei, bb->succs)
2112 if (e->insns.r)
cf103ca4 2113 commit_one_edge_insertion (e);
3dec4024 2114 }
ca6c03ca
JH
2115}
2116\f
6fb5fa3c 2117
f470c378 2118/* Print out RTL-specific basic block information (live information
a315c44c
SB
2119 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2120 documented in dumpfile.h. */
ca6c03ca 2121
10e9fecc 2122static void
a315c44c 2123rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
ca6c03ca 2124{
8879d71c
DM
2125 rtx_insn *insn;
2126 rtx_insn *last;
f470c378 2127 char *s_indent;
ca6c03ca 2128
ae50c0cb 2129 s_indent = (char *) alloca ((size_t) indent + 1);
400e39e3 2130 memset (s_indent, ' ', (size_t) indent);
f470c378 2131 s_indent[indent] = '\0';
b8698a0f 2132
a315c44c 2133 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2134 {
2135 df_dump_top (bb, outf);
2136 putc ('\n', outf);
2137 }
ca6c03ca 2138
68cc3174
EB
2139 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2140 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2141 insn = NEXT_INSN (insn))
a315c44c 2142 {
7b19209f
SB
2143 if (flags & TDF_DETAILS)
2144 df_dump_insn_top (insn, outf);
a315c44c
SB
2145 if (! (flags & TDF_SLIM))
2146 print_rtl_single (outf, insn);
2147 else
2148 dump_insn_slim (outf, insn);
7b19209f
SB
2149 if (flags & TDF_DETAILS)
2150 df_dump_insn_bottom (insn, outf);
a315c44c
SB
2151 }
2152
2153 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2154 {
2155 df_dump_bottom (bb, outf);
2156 putc ('\n', outf);
2157 }
2158
ca6c03ca
JH
2159}
2160\f
c4669594
SB
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  With TDF_BLOCKS, annotate each insn with the basic block
   it starts/ends, and flag insns that are outside any block or claimed by
   more than one block.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: the block whose head/end this insn is, and
	 how many blocks claim the insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  /* First block to claim this insn marks it IN_ONE_BB;
		     any later claimant upgrades it to IN_MULTIPLE_BB.  */
		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2258\f
532aafad
SB
2259/* Update the branch probability of BB if a REG_BR_PROB is present. */
2260
b446e5a2 2261void
d329e058 2262update_br_prob_note (basic_block bb)
b446e5a2
JH
2263{
2264 rtx note;
4b4bf941 2265 if (!JUMP_P (BB_END (bb)))
b446e5a2 2266 return;
a813c111 2267 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
e5af9ddd 2268 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
b446e5a2 2269 return;
e5af9ddd 2270 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
b446e5a2 2271}
96370780
MK
2272
2273/* Get the last insn associated with block BB (that includes barriers and
2274 tablejumps after BB). */
636eb204 2275rtx_insn *
96370780
MK
2276get_last_bb_insn (basic_block bb)
2277{
8942ee0f 2278 rtx_jump_table_data *table;
636eb204
DM
2279 rtx_insn *tmp;
2280 rtx_insn *end = BB_END (bb);
96370780
MK
2281
2282 /* Include any jump table following the basic block. */
8942ee0f
DM
2283 if (tablejump_p (end, NULL, &table))
2284 end = table;
96370780
MK
2285
2286 /* Include any barriers that may follow the basic block. */
1e211590 2287 tmp = next_nonnote_insn_bb (end);
96370780
MK
2288 while (tmp && BARRIER_P (tmp))
2289 {
2290 end = tmp;
1e211590 2291 tmp = next_nonnote_insn_bb (end);
96370780
MK
2292 }
2293
2294 return end;
2295}
af205f67 2296
600b5b1d
TJ
/* Sanity check partition hotness to ensure that basic blocks in
   the cold partition don't dominate basic blocks in the hot partition.
   If FLAG_ONLY is true, report violations as errors.  Otherwise
   re-mark the dominated blocks as cold, since this is run after
   cfg optimizations that may make hot blocks previously reached
   by both hot and cold blocks now only reachable along cold paths.
   Return the vector of blocks found in violation (re-marked unless
   FLAG_ONLY); an empty vector means the partitioning was sane.  */

static vec<basic_block>
find_partition_fixes (bool flag_only)
{
  basic_block bb;
  vec<basic_block> bbs_in_cold_partition = vNULL;
  vec<basic_block> bbs_to_fix = vNULL;

  /* Callers check this.  */
  gcc_checking_assert (crtl->has_bb_partition);

  FOR_EACH_BB_FN (bb, cfun)
    if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
      bbs_in_cold_partition.safe_push (bb);

  if (bbs_in_cold_partition.is_empty ())
    return vNULL;

  bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);

  if (dom_calculated_here)
    calculate_dominance_info (CDI_DOMINATORS);

  /* Worklist algorithm: propagate coldness down the dominator tree,
     using bbs_in_cold_partition as the work queue.  */
  while (! bbs_in_cold_partition.is_empty ())
    {
      bb = bbs_in_cold_partition.pop ();
      /* Any blocks dominated by a block in the cold section
	 must also be cold.  */
      basic_block son;
      for (son = first_dom_son (CDI_DOMINATORS, bb);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	{
	  /* If son is not yet cold, then mark it cold here and
	     enqueue it for further processing.  */
	  if ((BB_PARTITION (son) != BB_COLD_PARTITION))
	    {
	      if (flag_only)
		error ("non-cold basic block %d dominated "
		       "by a block in the cold partition (%d)", son->index, bb->index);
	      else
		BB_SET_PARTITION (son, BB_COLD_PARTITION);
	      bbs_to_fix.safe_push (son);
	      bbs_in_cold_partition.safe_push (son);
	    }
	}
    }

  if (dom_calculated_here)
    free_dominance_info (CDI_DOMINATORS);

  return bbs_to_fix;
}
2356
2357/* Perform cleanup on the hot/cold bb partitioning after optimization
2358 passes that modify the cfg. */
2359
2360void
2361fixup_partitions (void)
2362{
2363 basic_block bb;
2364
2365 if (!crtl->has_bb_partition)
2366 return;
2367
2368 /* Delete any blocks that became unreachable and weren't
2369 already cleaned up, for example during edge forwarding
2370 and convert_jumps_to_returns. This will expose more
2371 opportunities for fixing the partition boundaries here.
2372 Also, the calculation of the dominance graph during verification
2373 will assert if there are unreachable nodes. */
2374 delete_unreachable_blocks ();
2375
2376 /* If there are partitions, do a sanity check on them: A basic block in
2377   a cold partition cannot dominate a basic block in a hot partition.
2378 Fixup any that now violate this requirement, as a result of edge
2379 forwarding and unreachable block deletion.  */
2380 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2381
2382 /* Do the partition fixup after all necessary blocks have been converted to
2383 cold, so that we only update the region crossings the minimum number of
2384 places, which can require forcing edges to be non fallthru. */
2385 while (! bbs_to_fix.is_empty ())
2386 {
2387 bb = bbs_to_fix.pop ();
2388 fixup_new_cold_bb (bb);
2389 }
2390}
2391
af205f67
TJ
2392/* Verify, in the basic block chain, that there is at most one switch
2393 between hot/cold partitions. This condition will not be true until
2394 after reorder_basic_blocks is called. */
2395
251a41b9 2396static int
af205f67
TJ
2397verify_hot_cold_block_grouping (void)
2398{
2399 basic_block bb;
2400 int err = 0;
2401 bool switched_sections = false;
2402 int current_partition = BB_UNPARTITIONED;
2403
3371a64f
TJ
2404 /* Even after bb reordering is complete, we go into cfglayout mode
2405 again (in compgoto). Ensure we don't call this before going back
2406 into linearized RTL when any layout fixes would have been committed. */
2407 if (!crtl->bb_reorder_complete
c3284718 2408 || current_ir_type () != IR_RTL_CFGRTL)
251a41b9 2409 return err;
af205f67 2410
11cd3bed 2411 FOR_EACH_BB_FN (bb, cfun)
af205f67
TJ
2412 {
2413 if (current_partition != BB_UNPARTITIONED
2414 && BB_PARTITION (bb) != current_partition)
2415 {
2416 if (switched_sections)
2417 {
2418 error ("multiple hot/cold transitions found (bb %i)",
2419 bb->index);
2420 err = 1;
2421 }
2422 else
2423 switched_sections = true;
2424
2425 if (!crtl->has_bb_partition)
2426 error ("partition found but function partition flag not set");
2427 }
2428 current_partition = BB_PARTITION (bb);
2429 }
2430
251a41b9 2431 return err;
af205f67 2432}
b446e5a2 2433\f
ca6c03ca 2434
251a41b9
TJ
2435/* Perform several checks on the edges out of each block, such as
2436 the consistency of the branch probabilities, the correctness
2437 of hot/cold partition crossing edges, and the number of expected
600b5b1d
TJ
2438 successor edges. Also verify that the dominance relationship
2439 between hot/cold blocks is sane. */
f470c378 2440
10e9fecc 2441static int
251a41b9 2442rtl_verify_edges (void)
ca6c03ca 2443{
10e9fecc 2444 int err = 0;
94eb5ddb 2445 basic_block bb;
ca6c03ca 2446
4f42035e 2447 FOR_EACH_BB_REVERSE_FN (bb, cfun)
ca6c03ca 2448 {
f99ffaa3
EB
2449 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2450 int n_eh = 0, n_abnormal = 0;
3cf54412 2451 edge e, fallthru = NULL;
628f6a4e 2452 edge_iterator ei;
251a41b9 2453 rtx note;
3371a64f 2454 bool has_crossing_edge = false;
ca6c03ca 2455
2085a21f 2456 if (JUMP_P (BB_END (bb))
a813c111 2457 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
628f6a4e 2458 && EDGE_COUNT (bb->succs) >= 2
a813c111 2459 && any_condjump_p (BB_END (bb)))
5a1a3e5e 2460 {
e5af9ddd 2461 if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
0a6a6ac9 2462 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
5a1a3e5e 2463 {
e5af9ddd
RS
2464 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2465 XINT (note, 0), BRANCH_EDGE (bb)->probability);
5a1a3e5e
JH
2466 err = 1;
2467 }
2468 }
251a41b9 2469
628f6a4e 2470 FOR_EACH_EDGE (e, ei, bb->succs)
ca6c03ca 2471 {
0be7e7a6
RH
2472 bool is_crossing;
2473
ca6c03ca 2474 if (e->flags & EDGE_FALLTHRU)
0be7e7a6
RH
2475 n_fallthru++, fallthru = e;
2476
2477 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
fefa31b5
DM
2478 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2479 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
3371a64f 2480 has_crossing_edge |= is_crossing;
0be7e7a6 2481 if (e->flags & EDGE_CROSSING)
750054a2 2482 {
0be7e7a6
RH
2483 if (!is_crossing)
2484 {
2485 error ("EDGE_CROSSING incorrectly set across same section");
2486 err = 1;
2487 }
2488 if (e->flags & EDGE_FALLTHRU)
2489 {
f99ffaa3 2490 error ("fallthru edge crosses section boundary in bb %i",
750054a2
CT
2491 e->src->index);
2492 err = 1;
2493 }
0be7e7a6
RH
2494 if (e->flags & EDGE_EH)
2495 {
f99ffaa3 2496 error ("EH edge crosses section boundary in bb %i",
0be7e7a6
RH
2497 e->src->index);
2498 err = 1;
2499 }
339ba33b 2500 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
3371a64f
TJ
2501 {
2502 error ("No region crossing jump at section boundary in bb %i",
2503 bb->index);
2504 err = 1;
2505 }
0be7e7a6
RH
2506 }
2507 else if (is_crossing)
2508 {
2509 error ("EDGE_CROSSING missing across section boundary");
2510 err = 1;
750054a2 2511 }
3dec4024 2512
65f43cdf
ZD
2513 if ((e->flags & ~(EDGE_DFS_BACK
2514 | EDGE_CAN_FALLTHRU
2515 | EDGE_IRREDUCIBLE_LOOP
9beb1c84 2516 | EDGE_LOOP_EXIT
7e872b90
JJ
2517 | EDGE_CROSSING
2518 | EDGE_PRESERVE)) == 0)
3dec4024
JH
2519 n_branch++;
2520
2521 if (e->flags & EDGE_ABNORMAL_CALL)
f99ffaa3
EB
2522 n_abnormal_call++;
2523
2524 if (e->flags & EDGE_SIBCALL)
2525 n_sibcall++;
3dec4024
JH
2526
2527 if (e->flags & EDGE_EH)
2528 n_eh++;
f99ffaa3
EB
2529
2530 if (e->flags & EDGE_ABNORMAL)
3dec4024 2531 n_abnormal++;
ca6c03ca 2532 }
5f0d2358 2533
3371a64f 2534 if (!has_crossing_edge
339ba33b
RS
2535 && JUMP_P (BB_END (bb))
2536 && CROSSING_JUMP_P (BB_END (bb)))
3371a64f
TJ
2537 {
2538 print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
2539 error ("Region crossing jump across same section in bb %i",
2540 bb->index);
2541 err = 1;
2542 }
2543
1d65f45c 2544 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
3dec4024 2545 {
f99ffaa3 2546 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
3dec4024
JH
2547 err = 1;
2548 }
1d65f45c
RH
2549 if (n_eh > 1)
2550 {
f99ffaa3 2551 error ("too many exception handling edges in bb %i", bb->index);
1d65f45c
RH
2552 err = 1;
2553 }
3dec4024 2554 if (n_branch
4b4bf941 2555 && (!JUMP_P (BB_END (bb))
a813c111
SB
2556 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2557 || any_condjump_p (BB_END (bb))))))
3dec4024 2558 {
ab532386 2559 error ("too many outgoing branch edges from bb %i", bb->index);
3dec4024
JH
2560 err = 1;
2561 }
a813c111 2562 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
3dec4024 2563 {
f99ffaa3 2564 error ("fallthru edge after unconditional jump in bb %i", bb->index);
3dec4024
JH
2565 err = 1;
2566 }
a813c111 2567 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
3dec4024 2568 {
f99ffaa3
EB
2569 error ("wrong number of branch edges after unconditional jump"
2570 " in bb %i", bb->index);
3dec4024
JH
2571 err = 1;
2572 }
a813c111 2573 if (n_branch != 1 && any_condjump_p (BB_END (bb))
c11fd0b2 2574 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
3dec4024 2575 {
f99ffaa3
EB
2576 error ("wrong amount of branch edges after conditional jump"
2577 " in bb %i", bb->index);
2578 err = 1;
2579 }
2580 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2581 {
2582 error ("abnormal call edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2583 err = 1;
2584 }
f99ffaa3 2585 if (n_sibcall && !CALL_P (BB_END (bb)))
3dec4024 2586 {
f99ffaa3 2587 error ("sibcall edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2588 err = 1;
2589 }
f99ffaa3
EB
2590 if (n_abnormal > n_eh
2591 && !(CALL_P (BB_END (bb))
2592 && n_abnormal == n_abnormal_call + n_sibcall)
4b4bf941 2593 && (!JUMP_P (BB_END (bb))
a813c111
SB
2594 || any_condjump_p (BB_END (bb))
2595 || any_uncondjump_p (BB_END (bb))))
3dec4024 2596 {
ab532386 2597 error ("abnormal edges for no purpose in bb %i", bb->index);
3dec4024
JH
2598 err = 1;
2599 }
251a41b9 2600 }
f87c27b4 2601
600b5b1d
TJ
2602 /* If there are partitions, do a sanity check on them: A basic block in
2603   a cold partition cannot dominate a basic block in a hot partition.  */
2604 if (crtl->has_bb_partition && !err)
2605 {
2606 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2607 err = !bbs_to_fix.is_empty ();
2608 }
2609
251a41b9
TJ
2610 /* Clean up. */
2611 return err;
2612}
5f0d2358 2613
251a41b9
TJ
/* Checks on the instructions within blocks.  Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.
   Return nonzero on failure; a flow-control insn inside a block is
   fatal (fatal_insn does not return).  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A label that is also the block end leaves no room for the
	     mandatory basic block note.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the block body: no stray basic block notes, and no
	   control flow insns before the block end.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
2677
2678/* Verify that block pointers for instructions in basic blocks, headers and
2679 footers are set appropriately. */
2680
2681static int
2682rtl_verify_bb_pointers (void)
2683{
2684 int err = 0;
2685 basic_block bb;
2686
2687 /* Check the general integrity of the basic blocks. */
4f42035e 2688 FOR_EACH_BB_REVERSE_FN (bb, cfun)
251a41b9 2689 {
8879d71c 2690 rtx_insn *insn;
251a41b9
TJ
2691
2692 if (!(bb->flags & BB_RTL))
2693 {
2694 error ("BB_RTL flag not set for block %d", bb->index);
2695 err = 1;
2696 }
2697
2698 FOR_BB_INSNS (bb, insn)
2699 if (BLOCK_FOR_INSN (insn) != bb)
2700 {
2701 error ("insn %d basic block pointer is %d, should be %d",
2702 INSN_UID (insn),
2703 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2704 bb->index);
2705 err = 1;
2706 }
2707
2708 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2709 if (!BARRIER_P (insn)
2710 && BLOCK_FOR_INSN (insn) != NULL)
2711 {
2712 error ("insn %d in header of bb %d has non-NULL basic block",
2713 INSN_UID (insn), bb->index);
2714 err = 1;
2715 }
2716 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2717 if (!BARRIER_P (insn)
2718 && BLOCK_FOR_INSN (insn) != NULL)
2719 {
2720 error ("insn %d in footer of bb %d has non-NULL basic block",
2721 INSN_UID (insn), bb->index);
2722 err = 1;
2723 }
2724 }
af205f67 2725
10e9fecc 2726 /* Clean up. */
10e9fecc
JH
2727 return err;
2728}
5f0d2358 2729
10e9fecc
JH
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  /* Run each sub-verifier in turn; they are kept as separate sequenced
     statements so their diagnostics come out in a stable order, and all
     of them run even when an earlier one fails.  */
  int err = rtl_verify_bb_pointers ();

  err |= rtl_verify_bb_insns ();
  err |= rtl_verify_edges ();

  return err;
}
2761
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.  Blocks are visited in reverse order and the
   chain is scanned backwards, so each block's span is checked against
   the region between it and the previously verified block.
   Return nonzero on failure.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  /* Per-insn-UID map of the block each insn has been claimed by, used to
     detect insns appearing in more than one block.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Resume the next (earlier) block's scan just before this head.  */
      last_head = PREV_INSN (x);
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2850
2851/* Verify that fallthru edges point to adjacent blocks in layout order and
2852 that barriers exist after non-fallthru blocks. */
2853
2854static int
2855rtl_verify_fallthru (void)
2856{
2857 basic_block bb;
2858 int err = 0;
2859
4f42035e 2860 FOR_EACH_BB_REVERSE_FN (bb, cfun)
251a41b9
TJ
2861 {
2862 edge e;
9eab6785 2863
0fd4b31d 2864 e = find_fallthru_edge (bb->succs);
10e9fecc
JH
2865 if (!e)
2866 {
8879d71c 2867 rtx_insn *insn;
10e9fecc
JH
2868
2869 /* Ensure existence of barrier in BB with no fallthru edges. */
468059bc
DD
2870 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2871 {
2872 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
10e9fecc
JH
2873 {
2874 error ("missing barrier after block %i", bb->index);
2875 err = 1;
2876 break;
2877 }
468059bc
DD
2878 if (BARRIER_P (insn))
2879 break;
2880 }
10e9fecc 2881 }
fefa31b5
DM
2882 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2883 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
c22cacf3 2884 {
8879d71c 2885 rtx_insn *insn;
10e9fecc
JH
2886
2887 if (e->src->next_bb != e->dest)
2888 {
2889 error
2890 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2891 e->src->index, e->dest->index);
2892 err = 1;
2893 }
2894 else
a813c111 2895 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
10e9fecc 2896 insn = NEXT_INSN (insn))
6be85b25 2897 if (BARRIER_P (insn) || INSN_P (insn))
10e9fecc
JH
2898 {
2899 error ("verify_flow_info: Incorrect fallthru %i->%i",
2900 e->src->index, e->dest->index);
2901 fatal_insn ("wrong insn in the fallthru edge", insn);
2902 err = 1;
2903 }
c22cacf3 2904 }
10e9fecc 2905 }
ca6c03ca 2906
251a41b9
TJ
2907 return err;
2908}
2909
2910/* Verify that blocks are laid out in consecutive order. While walking the
2911 instructions, verify that all expected instructions are inside the basic
2912 blocks, and that all returns are followed by barriers. */
2913
2914static int
2915rtl_verify_bb_layout (void)
2916{
2917 basic_block bb;
2918 int err = 0;
8879d71c 2919 rtx_insn *x;
251a41b9 2920 int num_bb_notes;
8879d71c 2921 rtx_insn * const rtx_first = get_insns ();
fefa31b5 2922 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
9eab6785 2923
ca6c03ca 2924 num_bb_notes = 0;
fefa31b5 2925 last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
e0082a72 2926
5f0d2358 2927 for (x = rtx_first; x; x = NEXT_INSN (x))
ca6c03ca
JH
2928 {
2929 if (NOTE_INSN_BASIC_BLOCK_P (x))
2930 {
bf77398c 2931 bb = NOTE_BASIC_BLOCK (x);
5f0d2358 2932
ca6c03ca 2933 num_bb_notes++;
e0082a72 2934 if (bb != last_bb_seen->next_bb)
10e9fecc 2935 internal_error ("basic blocks not laid down consecutively");
ca6c03ca 2936
10e9fecc 2937 curr_bb = last_bb_seen = bb;
ca6c03ca
JH
2938 }
2939
10e9fecc 2940 if (!curr_bb)
ca6c03ca
JH
2941 {
2942 switch (GET_CODE (x))
2943 {
2944 case BARRIER:
2945 case NOTE:
2946 break;
2947
2948 case CODE_LABEL:
39718607 2949 /* An ADDR_VEC is placed outside any basic block. */
ca6c03ca 2950 if (NEXT_INSN (x)
481683e1 2951 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
5f0d2358 2952 x = NEXT_INSN (x);
ca6c03ca
JH
2953
2954 /* But in any case, non-deletable labels can appear anywhere. */
2955 break;
2956
2957 default:
1f978f5f 2958 fatal_insn ("insn outside basic block", x);
ca6c03ca
JH
2959 }
2960 }
2961
26cae194 2962 if (JUMP_P (x)
ca6c03ca 2963 && returnjump_p (x) && ! condjump_p (x)
15eb3a2e 2964 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
1f978f5f 2965 fatal_insn ("return not followed by barrier", x);
251a41b9 2966
a813c111 2967 if (curr_bb && x == BB_END (curr_bb))
10e9fecc 2968 curr_bb = NULL;
ca6c03ca
JH
2969 }
2970
0cae8d31 2971 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
ca6c03ca 2972 internal_error
0b17ab2f 2973 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
0cae8d31 2974 num_bb_notes, n_basic_blocks_for_fn (cfun));
ca6c03ca 2975
10e9fecc 2976 return err;
ca6c03ca 2977}
251a41b9
TJ
2978
2979/* Verify the CFG and RTL consistency common for both underlying RTL and
2980 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2981
2982 Currently it does following checks:
2983 - all checks of rtl_verify_flow_info_1
2984 - test head/end pointers
2985 - check that blocks are laid out in consecutive order
2986 - check that all insns are in the basic blocks
2987 (except the switch handling code, barriers and notes)
2988 - check that all returns are followed by barriers
600b5b1d
TJ
2989 - check that all fallthru edge points to the adjacent blocks
2990 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
2991
2992static int
2993rtl_verify_flow_info (void)
2994{
2995 int err = 0;
2996
2997 err |= rtl_verify_flow_info_1 ();
2998
2999 err |= rtl_verify_bb_insn_chain ();
3000
3001 err |= rtl_verify_fallthru ();
3002
3003 err |= rtl_verify_bb_layout ();
3004
3371a64f
TJ
3005 err |= verify_hot_cold_block_grouping ();
3006
251a41b9
TJ
3007 return err;
3008}
ca6c03ca 3009\f
eaec9b3d 3010/* Assume that the preceding pass has possibly eliminated jump instructions
ca6c03ca
JH
3011 or converted the unconditional jumps. Eliminate the edges from CFG.
3012 Return true if any edges are eliminated. */
3013
3014bool
d329e058 3015purge_dead_edges (basic_block bb)
ca6c03ca 3016{
628f6a4e 3017 edge e;
8879d71c
DM
3018 rtx_insn *insn = BB_END (bb);
3019 rtx note;
ca6c03ca 3020 bool purged = false;
628f6a4e
BE
3021 bool found;
3022 edge_iterator ei;
ca6c03ca 3023
b5b8b0ac
AO
3024 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3025 do
3026 insn = PREV_INSN (insn);
3027 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3028
70da1d03 3029 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
4b4bf941 3030 if (NONJUMP_INSN_P (insn)
70da1d03
JH
3031 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3032 {
3033 rtx eqnote;
3034
3035 if (! may_trap_p (PATTERN (insn))
3036 || ((eqnote = find_reg_equal_equiv_note (insn))
3037 && ! may_trap_p (XEXP (eqnote, 0))))
3038 remove_note (insn, note);
3039 }
3040
546c093e 3041 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
628f6a4e 3042 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
546c093e 3043 {
1d65f45c
RH
3044 bool remove = false;
3045
e5f9a909
JW
3046 /* There are three types of edges we need to handle correctly here: EH
3047 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3048 latter can appear when nonlocal gotos are used. */
1d65f45c 3049 if (e->flags & EDGE_ABNORMAL_CALL)
546c093e 3050 {
1d65f45c
RH
3051 if (!CALL_P (insn))
3052 remove = true;
3053 else if (can_nonlocal_goto (insn))
3054 ;
3055 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3056 ;
0a35513e
AH
3057 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3058 ;
1d65f45c
RH
3059 else
3060 remove = true;
546c093e 3061 }
1d65f45c
RH
3062 else if (e->flags & EDGE_EH)
3063 remove = !can_throw_internal (insn);
3064
3065 if (remove)
546c093e 3066 {
1d65f45c
RH
3067 remove_edge (e);
3068 df_set_bb_dirty (bb);
3069 purged = true;
546c093e
RH
3070 }
3071 else
1d65f45c 3072 ei_next (&ei);
546c093e 3073 }
5f0d2358 3074
4b4bf941 3075 if (JUMP_P (insn))
ca6c03ca
JH
3076 {
3077 rtx note;
3078 edge b,f;
628f6a4e 3079 edge_iterator ei;
5f0d2358 3080
ca6c03ca
JH
3081 /* We do care only about conditional jumps and simplejumps. */
3082 if (!any_condjump_p (insn)
3083 && !returnjump_p (insn)
3084 && !simplejump_p (insn))
c51d95ec 3085 return purged;
5f0d2358 3086
5a1a3e5e
JH
3087 /* Branch probability/prediction notes are defined only for
3088 condjumps. We've possibly turned condjump into simplejump. */
3089 if (simplejump_p (insn))
3090 {
3091 note = find_reg_note (insn, REG_BR_PROB, NULL);
3092 if (note)
3093 remove_note (insn, note);
3094 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3095 remove_note (insn, note);
3096 }
3097
628f6a4e 3098 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 3099 {
7fcd7218
JH
3100 /* Avoid abnormal flags to leak from computed jumps turned
3101 into simplejumps. */
f87c27b4 3102
0e1638d4 3103 e->flags &= ~EDGE_ABNORMAL;
7fcd7218 3104
5a566bed
MM
3105 /* See if this edge is one we should keep. */
3106 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3107 /* A conditional jump can fall through into the next
3108 block, so we should keep the edge. */
628f6a4e
BE
3109 {
3110 ei_next (&ei);
3111 continue;
3112 }
fefa31b5 3113 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
a813c111 3114 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
5a566bed
MM
3115 /* If the destination block is the target of the jump,
3116 keep the edge. */
628f6a4e
BE
3117 {
3118 ei_next (&ei);
3119 continue;
3120 }
fefa31b5
DM
3121 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3122 && returnjump_p (insn))
5a566bed
MM
3123 /* If the destination block is the exit block, and this
3124 instruction is a return, then keep the edge. */
628f6a4e
BE
3125 {
3126 ei_next (&ei);
3127 continue;
3128 }
5a566bed
MM
3129 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3130 /* Keep the edges that correspond to exceptions thrown by
0b75beaa
EB
3131 this instruction and rematerialize the EDGE_ABNORMAL
3132 flag we just cleared above. */
3133 {
3134 e->flags |= EDGE_ABNORMAL;
628f6a4e 3135 ei_next (&ei);
0b75beaa
EB
3136 continue;
3137 }
5f0d2358 3138
5a566bed 3139 /* We do not need this edge. */
6fb5fa3c 3140 df_set_bb_dirty (bb);
ca6c03ca
JH
3141 purged = true;
3142 remove_edge (e);
3143 }
5f0d2358 3144
628f6a4e 3145 if (EDGE_COUNT (bb->succs) == 0 || !purged)
c51d95ec 3146 return purged;
5f0d2358 3147
c263766c
RH
3148 if (dump_file)
3149 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5f0d2358 3150
ca6c03ca
JH
3151 if (!optimize)
3152 return purged;
3153
3154 /* Redistribute probabilities. */
c5cbcccf 3155 if (single_succ_p (bb))
ca6c03ca 3156 {
c5cbcccf
ZD
3157 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3158 single_succ_edge (bb)->count = bb->count;
f87c27b4 3159 }
ca6c03ca
JH
3160 else
3161 {
3162 note = find_reg_note (insn, REG_BR_PROB, NULL);
3163 if (!note)
3164 return purged;
5f0d2358 3165
ca6c03ca
JH
3166 b = BRANCH_EDGE (bb);
3167 f = FALLTHRU_EDGE (bb);
e5af9ddd 3168 b->probability = XINT (note, 0);
ca6c03ca 3169 f->probability = REG_BR_PROB_BASE - b->probability;
8ddb5a29 3170 /* Update these to use GCOV_COMPUTE_SCALE. */
ca6c03ca
JH
3171 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
3172 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
3173 }
5f0d2358 3174
ca6c03ca
JH
3175 return purged;
3176 }
4b4bf941 3177 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
1722c2c8
RH
3178 {
3179 /* First, there should not be any EH or ABCALL edges resulting
3180 from non-local gotos and the like. If there were, we shouldn't
3181 have created the sibcall in the first place. Second, there
3182 should of course never have been a fallthru edge. */
c5cbcccf
ZD
3183 gcc_assert (single_succ_p (bb));
3184 gcc_assert (single_succ_edge (bb)->flags
3185 == (EDGE_SIBCALL | EDGE_ABNORMAL));
1722c2c8
RH
3186
3187 return 0;
3188 }
ca6c03ca 3189
ca6c03ca
JH
3190 /* If we don't see a jump insn, we don't know exactly why the block would
3191 have been broken at this point. Look for a simple, non-fallthru edge,
3192 as these are only created by conditional branches. If we find such an
3193 edge we know that there used to be a jump here and can then safely
3194 remove all non-fallthru edges. */
628f6a4e
BE
3195 found = false;
3196 FOR_EACH_EDGE (e, ei, bb->succs)
3197 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3198 {
3199 found = true;
3200 break;
3201 }
5f0d2358 3202
628f6a4e 3203 if (!found)
ca6c03ca 3204 return purged;
5f0d2358 3205
2afa8dce
DB
3206 /* Remove all but the fake and fallthru edges. The fake edge may be
3207 the only successor for this block in the case of noreturn
3208 calls. */
628f6a4e 3209 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 3210 {
2afa8dce 3211 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
c51d95ec 3212 {
6fb5fa3c 3213 df_set_bb_dirty (bb);
c51d95ec
JH
3214 remove_edge (e);
3215 purged = true;
3216 }
628f6a4e
BE
3217 else
3218 ei_next (&ei);
ca6c03ca 3219 }
5f0d2358 3220
c5cbcccf 3221 gcc_assert (single_succ_p (bb));
5f0d2358 3222
c5cbcccf
ZD
3223 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3224 single_succ_edge (bb)->count = bb->count;
ca6c03ca 3225
c263766c
RH
3226 if (dump_file)
3227 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
0b17ab2f 3228 bb->index);
ca6c03ca
JH
3229 return purged;
3230}
3231
5f0d2358
RK
3232/* Search all basic blocks for potentially dead edges and purge them. Return
3233 true if some edge has been eliminated. */
ca6c03ca
JH
3234
3235bool
25cd19de 3236purge_all_dead_edges (void)
ca6c03ca 3237{
e0082a72 3238 int purged = false;
e0082a72 3239 basic_block bb;
473fb060 3240
11cd3bed 3241 FOR_EACH_BB_FN (bb, cfun)
473fb060 3242 {
e0082a72 3243 bool purged_here = purge_dead_edges (bb);
5f0d2358 3244
473fb060 3245 purged |= purged_here;
473fb060 3246 }
5f0d2358 3247
ca6c03ca
JH
3248 return purged;
3249}
9ee634e3 3250
ba5e9aca
EB
3251/* This is used by a few passes that emit some instructions after abnormal
3252 calls, moving the basic block's end, while they in fact do want to emit
3253 them on the fallthru edge. Look for abnormal call edges, find backward
3254 the call in the block and insert the instructions on the edge instead.
3255
3256 Similarly, handle instructions throwing exceptions internally.
3257
3258 Return true when instructions have been found and inserted on edges. */
3259
3260bool
3261fixup_abnormal_edges (void)
3262{
3263 bool inserted = false;
3264 basic_block bb;
3265
11cd3bed 3266 FOR_EACH_BB_FN (bb, cfun)
ba5e9aca
EB
3267 {
3268 edge e;
3269 edge_iterator ei;
3270
3271 /* Look for cases we are interested in - calls or instructions causing
3272 exceptions. */
3273 FOR_EACH_EDGE (e, ei, bb->succs)
3274 if ((e->flags & EDGE_ABNORMAL_CALL)
3275 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3276 == (EDGE_ABNORMAL | EDGE_EH)))
3277 break;
3278
3279 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3280 {
8879d71c 3281 rtx_insn *insn;
ba5e9aca
EB
3282
3283 /* Get past the new insns generated. Allow notes, as the insns
3284 may be already deleted. */
3285 insn = BB_END (bb);
3286 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3287 && !can_throw_internal (insn)
3288 && insn != BB_HEAD (bb))
3289 insn = PREV_INSN (insn);
3290
3291 if (CALL_P (insn) || can_throw_internal (insn))
3292 {
8879d71c 3293 rtx_insn *stop, *next;
ba5e9aca
EB
3294
3295 e = find_fallthru_edge (bb->succs);
3296
3297 stop = NEXT_INSN (BB_END (bb));
1130d5e3 3298 BB_END (bb) = insn;
ba5e9aca
EB
3299
3300 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3301 {
3302 next = NEXT_INSN (insn);
3303 if (INSN_P (insn))
3304 {
3305 delete_insn (insn);
3306
3307 /* Sometimes there's still the return value USE.
3308 If it's placed after a trapping call (i.e. that
3309 call is the last insn anyway), we have no fallthru
3310 edge. Simply delete this use and don't try to insert
3311 on the non-existent edge. */
3312 if (GET_CODE (PATTERN (insn)) != USE)
3313 {
3314 /* We're not deleting it, we're moving it. */
4654c0cf 3315 insn->set_undeleted ();
0f82e5c9
DM
3316 SET_PREV_INSN (insn) = NULL_RTX;
3317 SET_NEXT_INSN (insn) = NULL_RTX;
ba5e9aca
EB
3318
3319 insert_insn_on_edge (insn, e);
3320 inserted = true;
3321 }
3322 }
3323 else if (!BARRIER_P (insn))
3324 set_block_for_insn (insn, NULL);
3325 }
3326 }
3327
3328 /* It may be that we don't find any trapping insn. In this
3329 case we discovered quite late that the insn that had been
3330 marked as can_throw_internal in fact couldn't trap at all.
3331 So we should in fact delete the EH edges out of the block. */
3332 else
3333 purge_dead_edges (bb);
3334 }
3335 }
3336
3337 return inserted;
3338}
78bde837
SB
3339\f
3340/* Cut the insns from FIRST to LAST out of the insns stream. */
3341
a90c614a 3342rtx_insn *
9152e0aa 3343unlink_insn_chain (rtx_insn *first, rtx_insn *last)
78bde837 3344{
a90c614a
DM
3345 rtx_insn *prevfirst = PREV_INSN (first);
3346 rtx_insn *nextlast = NEXT_INSN (last);
78bde837 3347
0f82e5c9
DM
3348 SET_PREV_INSN (first) = NULL;
3349 SET_NEXT_INSN (last) = NULL;
78bde837 3350 if (prevfirst)
0f82e5c9 3351 SET_NEXT_INSN (prevfirst) = nextlast;
78bde837 3352 if (nextlast)
0f82e5c9 3353 SET_PREV_INSN (nextlast) = prevfirst;
78bde837
SB
3354 else
3355 set_last_insn (prevfirst);
3356 if (!prevfirst)
3357 set_first_insn (nextlast);
9152e0aa 3358 return first;
78bde837
SB
3359}
3360\f
3361/* Skip over inter-block insns occurring after BB which are typically
3362 associated with BB (e.g., barriers). If there are any such insns,
3363 we return the last one. Otherwise, we return the end of BB. */
3364
8879d71c 3365static rtx_insn *
78bde837
SB
3366skip_insns_after_block (basic_block bb)
3367{
8879d71c 3368 rtx_insn *insn, *last_insn, *next_head, *prev;
78bde837 3369
8879d71c 3370 next_head = NULL;
fefa31b5 3371 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3372 next_head = BB_HEAD (bb->next_bb);
3373
3374 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3375 {
3376 if (insn == next_head)
3377 break;
3378
3379 switch (GET_CODE (insn))
3380 {
3381 case BARRIER:
3382 last_insn = insn;
3383 continue;
3384
3385 case NOTE:
3386 switch (NOTE_KIND (insn))
3387 {
3388 case NOTE_INSN_BLOCK_END:
3389 gcc_unreachable ();
3390 continue;
3391 default:
3392 continue;
3393 break;
3394 }
3395 break;
3396
3397 case CODE_LABEL:
3398 if (NEXT_INSN (insn)
3399 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3400 {
3401 insn = NEXT_INSN (insn);
3402 last_insn = insn;
3403 continue;
3404 }
3405 break;
3406
3407 default:
3408 break;
3409 }
3410
3411 break;
3412 }
3413
3414 /* It is possible to hit contradictory sequence. For instance:
3415
3416 jump_insn
3417 NOTE_INSN_BLOCK_BEG
3418 barrier
3419
3420 Where barrier belongs to jump_insn, but the note does not. This can be
3421 created by removing the basic block originally following
3422 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3423
3424 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3425 {
3426 prev = PREV_INSN (insn);
3427 if (NOTE_P (insn))
3428 switch (NOTE_KIND (insn))
3429 {
3430 case NOTE_INSN_BLOCK_END:
3431 gcc_unreachable ();
3432 break;
3433 case NOTE_INSN_DELETED:
3434 case NOTE_INSN_DELETED_LABEL:
3435 case NOTE_INSN_DELETED_DEBUG_LABEL:
3436 continue;
3437 default:
3438 reorder_insns (insn, insn, last_insn);
3439 }
3440 }
3441
3442 return last_insn;
3443}
3444
3445/* Locate or create a label for a given basic block. */
3446
3447static rtx
3448label_for_bb (basic_block bb)
3449{
3450 rtx label = BB_HEAD (bb);
3451
3452 if (!LABEL_P (label))
3453 {
3454 if (dump_file)
3455 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3456
3457 label = block_label (bb);
3458 }
3459
3460 return label;
3461}
3462
3463/* Locate the effective beginning and end of the insn chain for each
3464 block, as defined by skip_insns_after_block above. */
3465
3466static void
3467record_effective_endpoints (void)
3468{
8879d71c 3469 rtx_insn *next_insn;
78bde837 3470 basic_block bb;
8879d71c 3471 rtx_insn *insn;
78bde837
SB
3472
3473 for (insn = get_insns ();
3474 insn
3475 && NOTE_P (insn)
3476 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3477 insn = NEXT_INSN (insn))
3478 continue;
3479 /* No basic blocks at all? */
3480 gcc_assert (insn);
3481
3482 if (PREV_INSN (insn))
3483 cfg_layout_function_header =
3484 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3485 else
9152e0aa 3486 cfg_layout_function_header = NULL;
78bde837
SB
3487
3488 next_insn = get_insns ();
11cd3bed 3489 FOR_EACH_BB_FN (bb, cfun)
78bde837 3490 {
8879d71c 3491 rtx_insn *end;
78bde837
SB
3492
3493 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
1130d5e3 3494 BB_HEADER (bb) = unlink_insn_chain (next_insn,
190bea87 3495 PREV_INSN (BB_HEAD (bb)));
78bde837
SB
3496 end = skip_insns_after_block (bb);
3497 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
d8ce2eae 3498 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
78bde837
SB
3499 next_insn = NEXT_INSN (BB_END (bb));
3500 }
3501
3502 cfg_layout_function_footer = next_insn;
3503 if (cfg_layout_function_footer)
3504 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3505}
3506\f
27a4cd48
DM
3507namespace {
3508
3509const pass_data pass_data_into_cfg_layout_mode =
3510{
3511 RTL_PASS, /* type */
3512 "into_cfglayout", /* name */
3513 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3514 TV_CFG, /* tv_id */
3515 0, /* properties_required */
3516 PROP_cfglayout, /* properties_provided */
3517 0, /* properties_destroyed */
3518 0, /* todo_flags_start */
3519 0, /* todo_flags_finish */
78bde837
SB
3520};
3521
27a4cd48
DM
3522class pass_into_cfg_layout_mode : public rtl_opt_pass
3523{
3524public:
c3284718
RS
3525 pass_into_cfg_layout_mode (gcc::context *ctxt)
3526 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
27a4cd48
DM
3527 {}
3528
3529 /* opt_pass methods: */
be55bfe6
TS
3530 virtual unsigned int execute (function *)
3531 {
3532 cfg_layout_initialize (0);
3533 return 0;
3534 }
27a4cd48
DM
3535
3536}; // class pass_into_cfg_layout_mode
3537
3538} // anon namespace
3539
3540rtl_opt_pass *
3541make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3542{
3543 return new pass_into_cfg_layout_mode (ctxt);
3544}
3545
3546namespace {
3547
3548const pass_data pass_data_outof_cfg_layout_mode =
3549{
3550 RTL_PASS, /* type */
3551 "outof_cfglayout", /* name */
3552 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3553 TV_CFG, /* tv_id */
3554 0, /* properties_required */
3555 0, /* properties_provided */
3556 PROP_cfglayout, /* properties_destroyed */
3557 0, /* todo_flags_start */
3558 0, /* todo_flags_finish */
78bde837 3559};
27a4cd48
DM
3560
3561class pass_outof_cfg_layout_mode : public rtl_opt_pass
3562{
3563public:
c3284718
RS
3564 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3565 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
27a4cd48
DM
3566 {}
3567
3568 /* opt_pass methods: */
be55bfe6 3569 virtual unsigned int execute (function *);
27a4cd48
DM
3570
3571}; // class pass_outof_cfg_layout_mode
3572
be55bfe6
TS
3573unsigned int
3574pass_outof_cfg_layout_mode::execute (function *fun)
3575{
3576 basic_block bb;
3577
3578 FOR_EACH_BB_FN (bb, fun)
3579 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3580 bb->aux = bb->next_bb;
3581
3582 cfg_layout_finalize ();
3583
3584 return 0;
3585}
3586
27a4cd48
DM
3587} // anon namespace
3588
3589rtl_opt_pass *
3590make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3591{
3592 return new pass_outof_cfg_layout_mode (ctxt);
3593}
78bde837
SB
3594\f
3595
3596/* Link the basic blocks in the correct order, compacting the basic
3597 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3598 function also clears the basic block header and footer fields.
3599
3600 This function is usually called after a pass (e.g. tracer) finishes
3601 some transformations while in cfglayout mode. The required sequence
3602 of the basic blocks is in a linked list along the bb->aux field.
3603 This functions re-links the basic block prev_bb and next_bb pointers
532aafad
SB
3604 accordingly, and it compacts and renumbers the blocks.
3605
3606 FIXME: This currently works only for RTL, but the only RTL-specific
3607 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3608 to GIMPLE a long time ago, but it doesn't relink the basic block
3609 chain. It could do that (to give better initial RTL) if this function
3610 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
78bde837
SB
3611
3612void
3613relink_block_chain (bool stay_in_cfglayout_mode)
3614{
3615 basic_block bb, prev_bb;
3616 int index;
3617
3618 /* Maybe dump the re-ordered sequence. */
3619 if (dump_file)
3620 {
3621 fprintf (dump_file, "Reordered sequence:\n");
fefa31b5
DM
3622 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3623 NUM_FIXED_BLOCKS;
78bde837
SB
3624 bb;
3625 bb = (basic_block) bb->aux, index++)
3626 {
3627 fprintf (dump_file, " %i ", index);
3628 if (get_bb_original (bb))
3629 fprintf (dump_file, "duplicate of %i ",
3630 get_bb_original (bb)->index);
3631 else if (forwarder_block_p (bb)
3632 && !LABEL_P (BB_HEAD (bb)))
3633 fprintf (dump_file, "compensation ");
3634 else
3635 fprintf (dump_file, "bb %i ", bb->index);
3636 fprintf (dump_file, " [%i]\n", bb->frequency);
3637 }
3638 }
3639
3640 /* Now reorder the blocks. */
fefa31b5
DM
3641 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3642 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
78bde837
SB
3643 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3644 {
3645 bb->prev_bb = prev_bb;
3646 prev_bb->next_bb = bb;
3647 }
fefa31b5
DM
3648 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3649 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
78bde837
SB
3650
3651 /* Then, clean up the aux fields. */
04a90bec 3652 FOR_ALL_BB_FN (bb, cfun)
78bde837
SB
3653 {
3654 bb->aux = NULL;
3655 if (!stay_in_cfglayout_mode)
1130d5e3 3656 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
78bde837
SB
3657 }
3658
3659 /* Maybe reset the original copy tables, they are not valid anymore
3660 when we renumber the basic blocks in compact_blocks. If we are
3661 are going out of cfglayout mode, don't re-allocate the tables. */
3662 free_original_copy_tables ();
3663 if (stay_in_cfglayout_mode)
3664 initialize_original_copy_tables ();
3665
3666 /* Finally, put basic_block_info in the new order. */
3667 compact_blocks ();
3668}
3669\f
3670
3671/* Given a reorder chain, rearrange the code to match. */
3672
3673static void
3674fixup_reorder_chain (void)
3675{
3676 basic_block bb;
9152e0aa 3677 rtx_insn *insn = NULL;
78bde837
SB
3678
3679 if (cfg_layout_function_header)
3680 {
3681 set_first_insn (cfg_layout_function_header);
3682 insn = cfg_layout_function_header;
3683 while (NEXT_INSN (insn))
3684 insn = NEXT_INSN (insn);
3685 }
3686
3687 /* First do the bulk reordering -- rechain the blocks without regard to
3688 the needed changes to jumps and labels. */
3689
fefa31b5
DM
3690 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3691 bb->aux)
78bde837
SB
3692 {
3693 if (BB_HEADER (bb))
3694 {
3695 if (insn)
0f82e5c9 3696 SET_NEXT_INSN (insn) = BB_HEADER (bb);
78bde837
SB
3697 else
3698 set_first_insn (BB_HEADER (bb));
0f82e5c9 3699 SET_PREV_INSN (BB_HEADER (bb)) = insn;
78bde837
SB
3700 insn = BB_HEADER (bb);
3701 while (NEXT_INSN (insn))
3702 insn = NEXT_INSN (insn);
3703 }
3704 if (insn)
0f82e5c9 3705 SET_NEXT_INSN (insn) = BB_HEAD (bb);
78bde837
SB
3706 else
3707 set_first_insn (BB_HEAD (bb));
0f82e5c9 3708 SET_PREV_INSN (BB_HEAD (bb)) = insn;
78bde837
SB
3709 insn = BB_END (bb);
3710 if (BB_FOOTER (bb))
3711 {
0f82e5c9
DM
3712 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3713 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
78bde837
SB
3714 while (NEXT_INSN (insn))
3715 insn = NEXT_INSN (insn);
3716 }
3717 }
3718
0f82e5c9 3719 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
78bde837 3720 if (cfg_layout_function_footer)
0f82e5c9 3721 SET_PREV_INSN (cfg_layout_function_footer) = insn;
78bde837
SB
3722
3723 while (NEXT_INSN (insn))
3724 insn = NEXT_INSN (insn);
3725
3726 set_last_insn (insn);
3727#ifdef ENABLE_CHECKING
3728 verify_insn_chain ();
3729#endif
3730
3731 /* Now add jumps and labels as needed to match the blocks new
29f3fd5b 3732 outgoing edges. */
78bde837 3733
29f3fd5b
SB
3734 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3735 bb->aux)
78bde837
SB
3736 {
3737 edge e_fall, e_taken, e;
8879d71c 3738 rtx_insn *bb_end_insn;
78bde837 3739 rtx ret_label = NULL_RTX;
3371a64f 3740 basic_block nb;
78bde837
SB
3741 edge_iterator ei;
3742
3743 if (EDGE_COUNT (bb->succs) == 0)
3744 continue;
3745
3746 /* Find the old fallthru edge, and another non-EH edge for
3747 a taken jump. */
3748 e_taken = e_fall = NULL;
3749
3750 FOR_EACH_EDGE (e, ei, bb->succs)
3751 if (e->flags & EDGE_FALLTHRU)
3752 e_fall = e;
3753 else if (! (e->flags & EDGE_EH))
3754 e_taken = e;
3755
3756 bb_end_insn = BB_END (bb);
3757 if (JUMP_P (bb_end_insn))
3758 {
3759 ret_label = JUMP_LABEL (bb_end_insn);
3760 if (any_condjump_p (bb_end_insn))
3761 {
3762 /* This might happen if the conditional jump has side
3763 effects and could therefore not be optimized away.
3764 Make the basic block to end with a barrier in order
3765 to prevent rtl_verify_flow_info from complaining. */
3766 if (!e_fall)
3767 {
3768 gcc_assert (!onlyjump_p (bb_end_insn)
91af97c3
TJ
3769 || returnjump_p (bb_end_insn)
3770 || (e_taken->flags & EDGE_CROSSING));
39718607 3771 emit_barrier_after (bb_end_insn);
78bde837
SB
3772 continue;
3773 }
3774
3775 /* If the old fallthru is still next, nothing to do. */
3776 if (bb->aux == e_fall->dest
fefa31b5 3777 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3778 continue;
3779
3780 /* The degenerated case of conditional jump jumping to the next
3781 instruction can happen for jumps with side effects. We need
3782 to construct a forwarder block and this will be done just
3783 fine by force_nonfallthru below. */
3784 if (!e_taken)
3785 ;
3786
3787 /* There is another special case: if *neither* block is next,
3788 such as happens at the very end of a function, then we'll
3789 need to add a new unconditional jump. Choose the taken
3790 edge based on known or assumed probability. */
3791 else if (bb->aux != e_taken->dest)
3792 {
3793 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3794
3795 if (note
e5af9ddd 3796 && XINT (note, 0) < REG_BR_PROB_BASE / 2
78bde837 3797 && invert_jump (bb_end_insn,
fefa31b5
DM
3798 (e_fall->dest
3799 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3800 ? NULL_RTX
3801 : label_for_bb (e_fall->dest)), 0))
3802 {
3803 e_fall->flags &= ~EDGE_FALLTHRU;
3804 gcc_checking_assert (could_fall_through
3805 (e_taken->src, e_taken->dest));
3806 e_taken->flags |= EDGE_FALLTHRU;
3807 update_br_prob_note (bb);
3808 e = e_fall, e_fall = e_taken, e_taken = e;
3809 }
3810 }
3811
3812 /* If the "jumping" edge is a crossing edge, and the fall
3813 through edge is non-crossing, leave things as they are. */
3814 else if ((e_taken->flags & EDGE_CROSSING)
3815 && !(e_fall->flags & EDGE_CROSSING))
3816 continue;
3817
3818 /* Otherwise we can try to invert the jump. This will
3819 basically never fail, however, keep up the pretense. */
3820 else if (invert_jump (bb_end_insn,
fefa31b5
DM
3821 (e_fall->dest
3822 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3823 ? NULL_RTX
3824 : label_for_bb (e_fall->dest)), 0))
3825 {
3826 e_fall->flags &= ~EDGE_FALLTHRU;
3827 gcc_checking_assert (could_fall_through
3828 (e_taken->src, e_taken->dest));
3829 e_taken->flags |= EDGE_FALLTHRU;
3830 update_br_prob_note (bb);
3831 if (LABEL_NUSES (ret_label) == 0
3832 && single_pred_p (e_taken->dest))
3833 delete_insn (ret_label);
3834 continue;
3835 }
3836 }
3837 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3838 {
3839 /* If the old fallthru is still next or if
3840 asm goto doesn't have a fallthru (e.g. when followed by
3841 __builtin_unreachable ()), nothing to do. */
3842 if (! e_fall
3843 || bb->aux == e_fall->dest
fefa31b5 3844 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3845 continue;
3846
3847 /* Otherwise we'll have to use the fallthru fixup below. */
3848 }
3849 else
3850 {
3851 /* Otherwise we have some return, switch or computed
3852 jump. In the 99% case, there should not have been a
3853 fallthru edge. */
3854 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3855 continue;
3856 }
3857 }
3858 else
3859 {
3860 /* No fallthru implies a noreturn function with EH edges, or
3861 something similarly bizarre. In any case, we don't need to
3862 do anything. */
3863 if (! e_fall)
3864 continue;
3865
3866 /* If the fallthru block is still next, nothing to do. */
3867 if (bb->aux == e_fall->dest)
3868 continue;
3869
3870 /* A fallthru to exit block. */
fefa31b5 3871 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3872 continue;
3873 }
3874
3875 /* We got here if we need to add a new jump insn.
3876 Note force_nonfallthru can delete E_FALL and thus we have to
3877 save E_FALL->src prior to the call to force_nonfallthru. */
78bde837
SB
3878 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3879 if (nb)
3880 {
3881 nb->aux = bb->aux;
3882 bb->aux = nb;
3883 /* Don't process this new block. */
3884 bb = nb;
78bde837
SB
3885 }
3886 }
3887
3888 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3889
29f3fd5b 3890 /* Annoying special case - jump around dead jumptables left in the code. */
11cd3bed 3891 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3892 {
3893 edge e = find_fallthru_edge (bb->succs);
3894
29f3fd5b
SB
3895 if (e && !can_fallthru (e->src, e->dest))
3896 force_nonfallthru (e);
78bde837
SB
3897 }
3898
3899 /* Ensure goto_locus from edges has some instructions with that locus
3900 in RTL. */
3901 if (!optimize)
11cd3bed 3902 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3903 {
3904 edge e;
3905 edge_iterator ei;
3906
3907 FOR_EACH_EDGE (e, ei, bb->succs)
2f13f2de 3908 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
5368224f 3909 && !(e->flags & EDGE_ABNORMAL))
78bde837
SB
3910 {
3911 edge e2;
3912 edge_iterator ei2;
3913 basic_block dest, nb;
8879d71c 3914 rtx_insn *end;
78bde837
SB
3915
3916 insn = BB_END (e->src);
3917 end = PREV_INSN (BB_HEAD (e->src));
3918 while (insn != end
5368224f 3919 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
78bde837
SB
3920 insn = PREV_INSN (insn);
3921 if (insn != end
5368224f 3922 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3923 continue;
3924 if (simplejump_p (BB_END (e->src))
5368224f 3925 && !INSN_HAS_LOCATION (BB_END (e->src)))
78bde837 3926 {
5368224f 3927 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
78bde837
SB
3928 continue;
3929 }
3930 dest = e->dest;
fefa31b5 3931 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3932 {
3933 /* Non-fallthru edges to the exit block cannot be split. */
3934 if (!(e->flags & EDGE_FALLTHRU))
3935 continue;
3936 }
3937 else
3938 {
3939 insn = BB_HEAD (dest);
3940 end = NEXT_INSN (BB_END (dest));
3941 while (insn != end && !NONDEBUG_INSN_P (insn))
3942 insn = NEXT_INSN (insn);
5368224f
DC
3943 if (insn != end && INSN_HAS_LOCATION (insn)
3944 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3945 continue;
3946 }
3947 nb = split_edge (e);
3948 if (!INSN_P (BB_END (nb)))
1130d5e3 3949 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
190bea87 3950 nb);
5368224f 3951 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
78bde837
SB
3952
3953 /* If there are other incoming edges to the destination block
3954 with the same goto locus, redirect them to the new block as
3955 well, this can prevent other such blocks from being created
3956 in subsequent iterations of the loop. */
3957 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
2f13f2de 3958 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
78bde837 3959 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
5368224f 3960 && e->goto_locus == e2->goto_locus)
78bde837
SB
3961 redirect_edge_and_branch (e2, nb);
3962 else
3963 ei_next (&ei2);
3964 }
3965 }
3966}
3967\f
3968/* Perform sanity checks on the insn chain.
3969 1. Check that next/prev pointers are consistent in both the forward and
3970 reverse direction.
3971 2. Count insns in chain, going both directions, and check if equal.
3972 3. Check that get_last_insn () returns the actual end of chain. */
3973
3974DEBUG_FUNCTION void
3975verify_insn_chain (void)
3976{
8879d71c 3977 rtx_insn *x, *prevx, *nextx;
78bde837
SB
3978 int insn_cnt1, insn_cnt2;
3979
3980 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3981 x != 0;
3982 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3983 gcc_assert (PREV_INSN (x) == prevx);
3984
3985 gcc_assert (prevx == get_last_insn ());
3986
3987 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3988 x != 0;
3989 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3990 gcc_assert (NEXT_INSN (x) == nextx);
3991
3992 gcc_assert (insn_cnt1 == insn_cnt2);
3993}
3994\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* Find the fallthru predecessor of the exit block, if any.  */
  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* A non-NULL bb->aux means BB is not already last in the ->aux chain
     that encodes the intended block ordering.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from its current position in the ->aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...advance to the current tail of the chain...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ...and relink BB as the final block before exit.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4039
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Scan exit's predecessors; remember the first fallthru edge and note
     whether a second one exists.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  /* Zero or one fallthru predecessor: nothing to fix.  */
  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  /* Redirect the remaining fallthru edges through the forwarder.  Use the
     safe-edge iterator because redirection mutates the predecessor vector
     we are walking.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* The block with a NULL ->aux is the current tail of the layout
	 chain; hook the forwarder after it so it comes last.  */
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
4090\f
4091/* Return true in case it is possible to duplicate the basic block BB. */
4092
4093static bool
4094cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4095{
4096 /* Do not attempt to duplicate tablejumps, as we need to unshare
4097 the dispatch table. This is difficult to do, as the instructions
4098 computing jump destination may be hoisted outside the basic block. */
4099 if (tablejump_p (BB_END (bb), NULL, NULL))
4100 return false;
4101
4102 /* Do not duplicate blocks containing insns that can't be copied. */
4103 if (targetm.cannot_copy_insn_p)
4104 {
8879d71c 4105 rtx_insn *insn = BB_HEAD (bb);
78bde837
SB
4106 while (1)
4107 {
4108 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4109 return false;
4110 if (insn == BB_END (bb))
4111 break;
4112 insn = NEXT_INSN (insn);
4113 }
4114 }
4115
4116 return true;
4117}
4118
a90c614a 4119rtx_insn *
5d34b050 4120duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
78bde837 4121{
5d34b050 4122 rtx_insn *insn, *next, *copy;
66e8df53 4123 rtx_note *last;
78bde837
SB
4124
4125 /* Avoid updating of boundaries of previous basic block. The
4126 note will get removed from insn stream in fixup. */
4127 last = emit_note (NOTE_INSN_DELETED);
4128
4129 /* Create copy at the end of INSN chain. The chain will
4130 be reordered later. */
4131 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4132 {
4133 switch (GET_CODE (insn))
4134 {
4135 case DEBUG_INSN:
4136 /* Don't duplicate label debug insns. */
4137 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4138 break;
4139 /* FALLTHRU */
4140 case INSN:
4141 case CALL_INSN:
4142 case JUMP_INSN:
78bde837
SB
4143 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4144 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4145 && ANY_RETURN_P (JUMP_LABEL (insn)))
4146 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4147 maybe_copy_prologue_epilogue_insn (insn, copy);
4148 break;
4149
39718607
SB
4150 case JUMP_TABLE_DATA:
4151 /* Avoid copying of dispatch tables. We never duplicate
4152 tablejumps, so this can hit only in case the table got
4153 moved far from original jump.
4154 Avoid copying following barrier as well if any
4155 (and debug insns in between). */
4156 for (next = NEXT_INSN (insn);
4157 next != NEXT_INSN (to);
4158 next = NEXT_INSN (next))
4159 if (!DEBUG_INSN_P (next))
4160 break;
4161 if (next != NEXT_INSN (to) && BARRIER_P (next))
4162 insn = next;
4163 break;
4164
78bde837
SB
4165 case CODE_LABEL:
4166 break;
4167
4168 case BARRIER:
4169 emit_barrier ();
4170 break;
4171
4172 case NOTE:
4173 switch (NOTE_KIND (insn))
4174 {
4175 /* In case prologue is empty and function contain label
4176 in first BB, we may want to copy the block. */
4177 case NOTE_INSN_PROLOGUE_END:
4178
4179 case NOTE_INSN_DELETED:
4180 case NOTE_INSN_DELETED_LABEL:
4181 case NOTE_INSN_DELETED_DEBUG_LABEL:
4182 /* No problem to strip these. */
4183 case NOTE_INSN_FUNCTION_BEG:
4184 /* There is always just single entry to function. */
4185 case NOTE_INSN_BASIC_BLOCK:
3371a64f
TJ
4186 /* We should only switch text sections once. */
4187 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
78bde837
SB
4188 break;
4189
4190 case NOTE_INSN_EPILOGUE_BEG:
66e8df53 4191 emit_note_copy (as_a <rtx_note *> (insn));
78bde837
SB
4192 break;
4193
4194 default:
4195 /* All other notes should have already been eliminated. */
4196 gcc_unreachable ();
4197 }
4198 break;
4199 default:
4200 gcc_unreachable ();
4201 }
4202 }
4203 insn = NEXT_INSN (last);
4204 delete_insn (last);
5d34b050 4205 return insn;
78bde837
SB
4206}
4207
/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx_insn *insn;
  basic_block new_bb;

  /* Copy BB's insns to the end of the stream and form a new block from
     the copy, placed just before the exit block.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Find the tail of the header chain, duplicate the whole chain,
	 and attach the (unlinked) copy as the new block's header.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Likewise for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4244
4245\f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups.  As an
     example, if edge forwarding performed when optimizing the cfg
     layout required moving a block from the hot to the cold
     section.  This would create an illegal partitioning unless some
     manual fixup was performed.  */
  gcc_assert (!(crtl->bb_reorder_complete
		&& flag_reorder_blocks_and_partition));

  initialize_original_copy_tables ();

  /* Install the cfglayout variants of the CFG hooks before any
     manipulation happens.  */
  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4281
4282/* Splits superblocks. */
4283void
4284break_superblocks (void)
4285{
4286 sbitmap superblocks;
4287 bool need = false;
4288 basic_block bb;
4289
8b1c6fd7 4290 superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 4291 bitmap_clear (superblocks);
78bde837 4292
11cd3bed 4293 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4294 if (bb->flags & BB_SUPERBLOCK)
4295 {
4296 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4297 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4298 need = true;
4299 }
4300
4301 if (need)
4302 {
4303 rebuild_jump_labels (get_insns ());
4304 find_many_sub_basic_blocks (superblocks);
4305 }
4306
4307 free (superblocks);
4308}
4309
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  /* Leave cfglayout mode: from here on the plain RTL CFG hooks apply.  */
  rtl_register_cfg_hooks ();
  /* When the target has no epilogue expander, the fallthru predecessor of
     exit must end up last in the chain; see
     fixup_fallthru_exit_predecessor.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
4337
ba5e9aca 4338
9ee634e3 4339/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4340
4341static basic_block
d329e058 4342cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4343{
ae50c0cb 4344 rtx insn = (rtx) insnp;
f470c378 4345 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4346
d8ce2eae
DM
4347 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4348 BB_FOOTER (bb) = NULL;
9ee634e3 4349
f470c378 4350 return new_bb;
9ee634e3
JH
4351}
4352
/* Redirect edge E to DEST.  Return the (possibly new) redirected edge,
   or NULL when the edge cannot be redirected (abnormal call / EH
   edges).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal call and EH edges cannot be redirected by editing jumps.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already pointing at DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  /* First try rewriting the jump at the end of SRC directly.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* A simple fallthru edge out of the entry block can be redirected by
     just updating the successor.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
4435
4436/* Simple wrapper as we always can redirect fallthru edges. */
4437static basic_block
d329e058 4438cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4439{
341c100f
NS
4440 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4441
4442 gcc_assert (redirected);
9ee634e3
JH
4443 return NULL;
4444}
4445
/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  /* Splice BB's out-of-line header insns back into the insn stream just
     ahead of the block's first insn, so they get deleted with it.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* First drop barriers from the footer chain (they must not be
	 re-emitted), stopping at a label if one appears.  */
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      /* Splice whatever remains of the footer after BB's last insn.  */
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Decide where any insns surviving the deletion should be reattached:
     the next block's header, or the function footer for the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* PREV/NEXT now bracket whatever rtl_delete_block left behind.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If anything survived in the deleted range, unlink it and prepend it
     to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4529
beb235f8 4530/* Return true when blocks A and B can be safely merged. */
b48d0358 4531
bc35512f 4532static bool
b48d0358 4533cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4534{
750054a2
CT
4535 /* If we are partitioning hot/cold basic blocks, we don't want to
4536 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4537 and cold sections.
4538
8e8d5162 4539 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4540 be optimizable (or blocks that appear to be mergeable), but which really
4541 must be left untouched (they are required to make it safely across
4542 partition boundaries). See the comments at the top of
8e8d5162
CT
4543 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4544
87c8b4be 4545 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4546 return false;
750054a2 4547
7d776ee2
RG
4548 /* Protect the loop latches. */
4549 if (current_loops && b->loop_father->latch == b)
4550 return false;
4551
894a84b5
BS
4552 /* If we would end up moving B's instructions, make sure it doesn't fall
4553 through into the exit block, since we cannot recover from a fallthrough
4554 edge into the exit block occurring in the middle of a function. */
4555 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4556 {
4557 edge e = find_fallthru_edge (b->succs);
fefa31b5 4558 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4559 return false;
4560 }
4561
bc35512f 4562 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4563 return (single_succ_p (a)
4564 && single_succ (a) == b
4565 && single_pred_p (b) == 1
628f6a4e 4566 && a != b
bc35512f 4567 /* Must be simple edge. */
c5cbcccf 4568 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4569 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4570 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4571 /* If the jump insn has side effects, we can't kill the edge.
4572 When not optimizing, try_redirect_by_replacing_jump will
4573 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4574 && (!JUMP_P (BB_END (a))
49ea3702 4575 || ((!optimize || reload_completed)
a813c111 4576 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4577}
4578
/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  /* Append B's footer chain to the tail of A's.  */
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  /* Chain B's header in front of A's footer.  */
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  /* B no longer owns any insns; delete its basic-block note.  */
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4682
4683/* Split edge E. */
f470c378 4684
bc35512f
JH
4685static basic_block
4686cfg_layout_split_edge (edge e)
4687{
bc35512f 4688 basic_block new_bb =
fefa31b5 4689 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4690 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4691 NULL_RTX, e->src);
4692
fefa31b5 4693 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4694 BB_COPY_PARTITION (new_bb, e->src);
4695 else
4696 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4697 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4698 redirect_edge_and_branch_force (e, new_bb);
4699
4700 return new_bb;
4701}
4702
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: plain RTL mode needs no extra fixup here; the
     hook exists for IR-specific cleanups.  */
}
4709
df92c640
SB
4710/* Return true if BB contains only labels or non-executable
4711 instructions. */
4712
4713static bool
4714rtl_block_empty_p (basic_block bb)
4715{
8879d71c 4716 rtx_insn *insn;
df92c640 4717
fefa31b5
DM
4718 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4719 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4720 return true;
4721
4722 FOR_BB_INSNS (bb, insn)
4723 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4724 return false;
4725
4726 return true;
4727}
4728
4729/* Split a basic block if it ends with a conditional branch and if
4730 the other part of the block is not empty. */
4731
4732static basic_block
4733rtl_split_block_before_cond_jump (basic_block bb)
4734{
8879d71c
DM
4735 rtx_insn *insn;
4736 rtx_insn *split_point = NULL;
4737 rtx_insn *last = NULL;
df92c640
SB
4738 bool found_code = false;
4739
4740 FOR_BB_INSNS (bb, insn)
4741 {
4742 if (any_condjump_p (insn))
4743 split_point = last;
4744 else if (NONDEBUG_INSN_P (insn))
4745 found_code = true;
4746 last = insn;
4747 }
4748
4749 /* Did not find everything. */
4750 if (found_code && split_point)
4751 return split_block (bb, split_point)->dest;
4752 else
4753 return NULL;
4754}
4755
6de9cd9a
DN
4756/* Return 1 if BB ends with a call, possibly followed by some
4757 instructions that must stay with the call, 0 otherwise. */
4758
4759static bool
b48d0358 4760rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a 4761{
8879d71c 4762 rtx_insn *insn = BB_END (bb);
6de9cd9a 4763
4b4bf941 4764 while (!CALL_P (insn)
6de9cd9a 4765 && insn != BB_HEAD (bb)
92ddef69 4766 && (keep_with_call_p (insn)
b5b8b0ac
AO
4767 || NOTE_P (insn)
4768 || DEBUG_INSN_P (insn)))
6de9cd9a 4769 insn = PREV_INSN (insn);
4b4bf941 4770 return (CALL_P (insn));
6de9cd9a
DN
4771}
4772
4773/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4774
4775static bool
9678086d 4776rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4777{
4778 return any_condjump_p (BB_END (bb));
4779}
4780
4781/* Return true if we need to add fake edge to exit.
4782 Helper function for rtl_flow_call_edges_add. */
4783
4784static bool
8879d71c 4785need_fake_edge_p (const rtx_insn *insn)
6de9cd9a
DN
4786{
4787 if (!INSN_P (insn))
4788 return false;
4789
4b4bf941 4790 if ((CALL_P (insn)
6de9cd9a
DN
4791 && !SIBLING_CALL_P (insn)
4792 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4793 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4794 return true;
4795
4796 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4797 && MEM_VOLATILE_P (PATTERN (insn)))
4798 || (GET_CODE (PATTERN (insn)) == PARALLEL
4799 && asm_noperands (insn) != -1
4800 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4801 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4802}
4803
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for an empty function (only the fixed entry/exit
     blocks exist).  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  /* With no BLOCKS restriction we must always consider the last block;
     otherwise only if it is a member of BLOCKS.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a (use (const_int 0)) on the fallthru edge to EXIT so
	     the call is no longer the last insn before EXIT; committing
	     the insertion splits the edge and creates the dummy block.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Block indices may be sparse after earlier removals.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Scan the block backwards; PREV_INSN is captured before any
	 splitting below so the walk stays on the original chain.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4930
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target, SECOND_HEAD should be the fall-thru;
   there is no need to handle this here, the loop versioning code handles
   this.  The reason for SECOND_HEAD is that it is needed for the condition
   in trees, and this should be of the same type since it is a hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label;
  rtx_insn *seq, *jump;
  /* COMP_RTX is a comparison rtx; pull out its operands and code.  */
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;


  label = block_label (first_head);
  /* Take the mode from whichever operand has one (a constant operand
     may have VOIDmode).  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump in a detached sequence, then splice it
     in after the end of COND_BB.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  /* Wire the generated jump to its target label and keep the label's
     use count accurate.  */
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond, in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
4968
4969
4970/* Given a block B with unconditional branch at its end, get the
4971 store the return the branch edge and the fall-thru edge in
4972 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4973static void
4974rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4975 edge *fallthru_edge)
4976{
4977 edge e = EDGE_SUCC (b, 0);
4978
4979 if (e->flags & EDGE_FALLTHRU)
4980 {
4981 *fallthru_edge = e;
4982 *branch_edge = EDGE_SUCC (b, 1);
4983 }
4984 else
4985 {
4986 *branch_edge = e;
4987 *fallthru_edge = EDGE_SUCC (b, 1);
4988 }
4989}
4990
5e2d947c
JH
/* Allocate and attach the RTL-specific info structure for basic block BB.
   BB must not already have RTL info; the new structure is GC-allocated
   and zero-initialized.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}
4998
14fa2cc0
ZD
4999/* Returns true if it is possible to remove edge E by redirecting
5000 it to the destination of the other edge from E->src. */
5001
5002static bool
9678086d 5003rtl_can_remove_branch_p (const_edge e)
14fa2cc0 5004{
9678086d
KG
5005 const_basic_block src = e->src;
5006 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
8879d71c
DM
5007 const rtx_insn *insn = BB_END (src);
5008 rtx set;
14fa2cc0
ZD
5009
5010 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 5011 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
5012 return false;
5013
5014 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5015 return false;
5016
3371a64f 5017 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
5018 return false;
5019
5020 if (!onlyjump_p (insn)
5021 || tablejump_p (insn, NULL, NULL))
5022 return false;
5023
5024 set = single_set (insn);
5025 if (!set || side_effects_p (set))
5026 return false;
5027
5028 return true;
5029}
5030
ffe14686
AM
5031static basic_block
5032rtl_duplicate_bb (basic_block bb)
5033{
5034 bb = cfg_layout_duplicate_bb (bb);
5035 bb->aux = NULL;
5036 return bb;
5037}
5038
aa4723d7
SB
5039/* Do book-keeping of basic block BB for the profile consistency checker.
5040 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5041 then do post-pass accounting. Store the counting in RECORD. */
5042static void
5043rtl_account_profile_record (basic_block bb, int after_pass,
5044 struct profile_record *record)
5045{
8879d71c 5046 rtx_insn *insn;
aa4723d7
SB
5047 FOR_BB_INSNS (bb, insn)
5048 if (INSN_P (insn))
5049 {
5050 record->size[after_pass]
5051 += insn_rtx_cost (PATTERN (insn), false);
0a6a6ac9 5052 if (profile_status_for_fn (cfun) == PROFILE_READ)
aa4723d7
SB
5053 record->time[after_pass]
5054 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
0a6a6ac9 5055 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
aa4723d7
SB
5056 record->time[after_pass]
5057 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5058 }
5059}
5060
9ee634e3
JH
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks,			/* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
5098
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
78bde837
SB
5140
5141#include "gt-cfgrtl.h"