]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
Time profile-based function reordering (phase 2).
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
ca6c03ca 1/* Control flow graph manipulation code for GNU compiler.
d1e082c2 2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
ca6c03ca
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca6c03ca
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca6c03ca 19
5f0d2358
RK
20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
ca6c03ca
JH
22
23 Available functionality:
bc35512f 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5f0d2358 25 - CFG-aware instruction chain manipulation
ca6c03ca 26 delete_insn, delete_insn_chain
bc35512f
JH
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
ba5e9aca
EB
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
bc35512f
JH
33
34 Functions not supposed for generic use:
5f0d2358 35 - Infrastructure to determine quickly basic block for insn
ca6c03ca 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5f0d2358 37 - Edge redirection with updating and optimizing of insn chain
bc35512f 38 block_label, tidy_fallthru_edge, force_nonfallthru */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6
ZW
42#include "coretypes.h"
43#include "tm.h"
ca6c03ca 44#include "tree.h"
ca6c03ca
JH
45#include "hard-reg-set.h"
46#include "basic-block.h"
3371a64f 47#include "bb-reorder.h"
ca6c03ca
JH
48#include "regs.h"
49#include "flags.h"
ca6c03ca
JH
50#include "function.h"
51#include "except.h"
0cbd9993 52#include "rtl-error.h"
ca6c03ca
JH
53#include "tm_p.h"
54#include "obstack.h"
2a34bece 55#include "insn-attr.h"
0a2ed1f1 56#include "insn-config.h"
ff25ef99 57#include "expr.h"
9fb32434 58#include "target.h"
677f3fa8 59#include "common/common-target.h"
1cb7dfc3 60#include "cfgloop.h"
5e2d947c 61#include "ggc.h"
ef330312 62#include "tree-pass.h"
6fb5fa3c 63#include "df.h"
ca6c03ca 64
78bde837
SB
65/* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
67static GTY(()) rtx cfg_layout_function_footer;
68static GTY(()) rtx cfg_layout_function_header;
69
70static rtx skip_insns_after_block (basic_block);
71static void record_effective_endpoints (void);
72static rtx label_for_bb (basic_block);
73static void fixup_reorder_chain (void);
74
75void verify_insn_chain (void);
76static void fixup_fallthru_exit_predecessor (void);
9678086d
KG
77static int can_delete_note_p (const_rtx);
78static int can_delete_label_p (const_rtx);
d329e058 79static basic_block rtl_split_edge (edge);
f470c378 80static bool rtl_move_block_after (basic_block, basic_block);
d329e058 81static int rtl_verify_flow_info (void);
f470c378 82static basic_block cfg_layout_split_block (basic_block, void *);
6de9cd9a 83static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
d329e058
AJ
84static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
85static void cfg_layout_delete_block (basic_block);
86static void rtl_delete_block (basic_block);
87static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
6de9cd9a 88static edge rtl_redirect_edge_and_branch (edge, basic_block);
f470c378 89static basic_block rtl_split_block (basic_block, void *);
a315c44c 90static void rtl_dump_bb (FILE *, basic_block, int, int);
d329e058 91static int rtl_verify_flow_info_1 (void);
f470c378 92static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
93\f
94/* Return true if NOTE is not one of the ones that must be kept paired,
5f0d2358 95 so that we may simply delete it. */
ca6c03ca
JH
96
97static int
9678086d 98can_delete_note_p (const_rtx note)
ca6c03ca 99{
cd9c1ca8
RH
100 switch (NOTE_KIND (note))
101 {
102 case NOTE_INSN_DELETED:
103 case NOTE_INSN_BASIC_BLOCK:
104 case NOTE_INSN_EPILOGUE_BEG:
105 return true;
106
107 default:
108 return false;
109 }
ca6c03ca
JH
110}
111
112/* True if a given label can be deleted. */
113
114static int
9678086d 115can_delete_label_p (const_rtx label)
ca6c03ca 116{
5f0d2358
RK
117 return (!LABEL_PRESERVE_P (label)
118 /* User declared labels must be preserved. */
119 && LABEL_NAME (label) == 0
610d2478 120 && !in_expr_list_p (forced_labels, label));
ca6c03ca
JH
121}
122
/* Delete INSN by patching it out.

   Labels that cannot simply be removed (see can_delete_label_p) are
   converted in place into NOTE_INSN_DELETED_LABEL notes.  Deleting a
   jump or an insn with label operands also drops the use counts of the
   labels it references, so unused labels can be cleaned up later.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  /* Rewrite the label in place rather than unlinking it.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.
	     This keeps the deleted-label note outside the block proper.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
219
3dec4024 220/* Like delete_insn but also purge dead edges from BB. */
9ed7b221 221
03fbe718 222void
d329e058 223delete_insn_and_edges (rtx insn)
3dec4024 224{
3dec4024
JH
225 bool purge = false;
226
ba4f7968 227 if (INSN_P (insn)
3dec4024 228 && BLOCK_FOR_INSN (insn)
a813c111 229 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024 230 purge = true;
03fbe718 231 delete_insn (insn);
3dec4024
JH
232 if (purge)
233 purge_dead_edges (BLOCK_FOR_INSN (insn));
3dec4024
JH
234}
235
ca6c03ca 236/* Unlink a chain of insns between START and FINISH, leaving notes
a7b87f73
ZD
237 that must be paired. If CLEAR_BB is true, we set bb field for
238 insns that cannot be removed to NULL. */
ca6c03ca
JH
239
240void
a7b87f73 241delete_insn_chain (rtx start, rtx finish, bool clear_bb)
ca6c03ca 242{
03fbe718 243 rtx prev, current;
ca6c03ca 244
5f0d2358
RK
245 /* Unchain the insns one by one. It would be quicker to delete all of these
246 with a single unchaining, rather than one at a time, but we need to keep
247 the NOTE's. */
03fbe718 248 current = finish;
ca6c03ca
JH
249 while (1)
250 {
03fbe718
TV
251 prev = PREV_INSN (current);
252 if (NOTE_P (current) && !can_delete_note_p (current))
ca6c03ca
JH
253 ;
254 else
03fbe718 255 delete_insn (current);
ca6c03ca 256
03fbe718
TV
257 if (clear_bb && !INSN_DELETED_P (current))
258 set_block_for_insn (current, NULL);
a7b87f73 259
03fbe718 260 if (current == start)
ca6c03ca 261 break;
03fbe718 262 current = prev;
ca6c03ca
JH
263 }
264}
265\f
5f0d2358
RK
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;  /* NOTE: shadows the AFTER parameter; this is the insn
		     the bb note is re-threaded after, not a block.  */

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes after the label that starts the block.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
345
5f0d2358 346/* Create new basic block consisting of instructions in between HEAD and END
ffe14686
AM
347 and place it to the BB chain after block AFTER. END can be NULL to
348 create a new empty basic block before HEAD. Both END and HEAD can be
349 NULL to create basic block at the end of INSN chain. */
ca6c03ca 350
bc35512f
JH
351static basic_block
352rtl_create_basic_block (void *headp, void *endp, basic_block after)
ca6c03ca 353{
ae50c0cb 354 rtx head = (rtx) headp, end = (rtx) endp;
ca6c03ca 355 basic_block bb;
0b17ab2f 356
7eca0767 357 /* Grow the basic block array if needed. */
8b1c6fd7
DM
358 if ((size_t) last_basic_block_for_fn (cfun)
359 >= basic_block_info_for_fn (cfun)->length ())
7eca0767 360 {
8b1c6fd7
DM
361 size_t new_size =
362 (last_basic_block_for_fn (cfun)
363 + (last_basic_block_for_fn (cfun) + 3) / 4);
c5f05f68 364 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
7eca0767 365 }
0b17ab2f 366
0cae8d31 367 n_basic_blocks_for_fn (cfun)++;
ca6c03ca 368
852c6ec7 369 bb = create_basic_block_structure (head, end, NULL, after);
ca6c03ca
JH
370 bb->aux = NULL;
371 return bb;
372}
bc35512f
JH
373
374static basic_block
375cfg_layout_create_basic_block (void *head, void *end, basic_block after)
376{
377 basic_block newbb = rtl_create_basic_block (head, end, after);
378
bc35512f
JH
379 return newbb;
380}
ca6c03ca
JH
381\f
382/* Delete the insns in a (non-live) block. We physically delete every
383 non-deleted-note insn, and update the flow graph appropriately.
384
385 Return nonzero if we deleted an exception handler. */
386
387/* ??? Preserving all such notes strikes me as wrong. It would be nice
388 to post-process the stream to remove empty blocks, loops, ranges, etc. */
389
f0fda11c 390static void
d329e058 391rtl_delete_block (basic_block b)
ca6c03ca 392{
96370780 393 rtx insn, end;
ca6c03ca
JH
394
395 /* If the head of this block is a CODE_LABEL, then it might be the
f39e46ba
SB
396 label for an exception handler which can't be reached. We need
397 to remove the label from the exception_handler_label list. */
a813c111 398 insn = BB_HEAD (b);
ca6c03ca 399
96370780 400 end = get_last_bb_insn (b);
ca6c03ca
JH
401
402 /* Selectively delete the entire chain. */
a813c111 403 BB_HEAD (b) = NULL;
a7b87f73
ZD
404 delete_insn_chain (insn, end, true);
405
6fb5fa3c
DB
406
407 if (dump_file)
408 fprintf (dump_file, "deleting block %d\n", b->index);
409 df_bb_delete (b->index);
ca6c03ca
JH
410}
411\f
852c6ec7 412/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
413
414void
d329e058 415compute_bb_for_insn (void)
ca6c03ca 416{
e0082a72 417 basic_block bb;
ca6c03ca 418
11cd3bed 419 FOR_EACH_BB_FN (bb, cfun)
ca6c03ca 420 {
a813c111 421 rtx end = BB_END (bb);
5f0d2358 422 rtx insn;
ca6c03ca 423
a813c111 424 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 425 {
ba4f7968 426 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
427 if (insn == end)
428 break;
ca6c03ca
JH
429 }
430 }
431}
432
433/* Release the basic_block_for_insn array. */
434
c2924966 435unsigned int
d329e058 436free_bb_for_insn (void)
ca6c03ca 437{
ba4f7968
JH
438 rtx insn;
439 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 440 if (!BARRIER_P (insn))
ba4f7968 441 BLOCK_FOR_INSN (insn) = NULL;
c2924966 442 return 0;
ca6c03ca
JH
443}
444
2a34bece
EB
/* Pass worker: tear down per-insn CFG data at the end of RTL
   compilation, after emitting any notes that later phases need.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  /* Record the hot/cold section boundary before the CFG disappears.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}
464
27a4cd48
DM
/* Pass registration boilerplate for the *free_cfg pass, which destroys
   PROP_cfg (see properties_destroyed below).  */

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_pass_free_cfg (); }

}; // class pass_free_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
501
91278841
AP
502/* Return RTX to emit after when we want to emit code on the entry of function. */
503rtx
504entry_of_function (void)
505{
0cae8d31 506 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
fefa31b5 507 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
91278841
AP
508}
509
11b904a1
BS
510/* Emit INSN at the entry point of the function, ensuring that it is only
511 executed once per function. */
512void
513emit_insn_at_entry (rtx insn)
514{
fefa31b5 515 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
11b904a1 516 edge e = ei_safe_edge (ei);
5419bc7f 517 gcc_assert (e->flags & EDGE_FALLTHRU);
11b904a1
BS
518
519 insert_insn_on_edge (insn, e);
520 commit_edge_insertions ();
521}
522
6c3d0e31 523/* Update BLOCK_FOR_INSN of insns between BEGIN and END
b8698a0f 524 (or BARRIER if found) and notify df of the bb change.
6c3d0e31
SP
525 The insn chain range is inclusive
526 (i.e. both BEGIN and END will be updated. */
ca6c03ca 527
6c3d0e31
SP
528static void
529update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
ca6c03ca
JH
530{
531 rtx insn;
532
6c3d0e31
SP
533 end = NEXT_INSN (end);
534 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
63642d5a
AO
535 if (!BARRIER_P (insn))
536 df_insn_change_bb (insn, bb);
ca6c03ca 537}
6c3d0e31
SP
538
539/* Update BLOCK_FOR_INSN of insns in BB to BB,
540 and notify df of the change. */
541
542void
543update_bb_for_insn (basic_block bb)
544{
545 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
546}
547
532aafad
SB
548\f
549/* Like active_insn_p, except keep the return value clobber around
550 even after reload. */
551
552static bool
553flow_active_insn_p (const_rtx insn)
554{
555 if (active_insn_p (insn))
556 return true;
557
558 /* A clobber of the function return value exists for buggy
559 programs that fail to return a value. Its effect is to
560 keep the return value from being live across the entire
561 function. If we allow it to be skipped, we introduce the
562 possibility for register lifetime confusion. */
563 if (GET_CODE (PATTERN (insn)) == CLOBBER
564 && REG_P (XEXP (PATTERN (insn), 0))
565 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
566 return true;
567
568 return false;
569}
570
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb))
    return false;

  /* Every insn before the last must be inactive.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* After the loop INSN is BB_END (bb); the last insn may at most be a
     simple unconditional jump.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
591
592/* Likewise, but protect loop latches, headers and preheaders. */
593/* FIXME: Make this a cfg hook. */
594
595bool
596forwarder_block_p (const_basic_block bb)
597{
598 if (!contains_no_active_insn_p (bb))
599 return false;
600
532aafad
SB
601 /* Protect loop latches, headers and preheaders. */
602 if (current_loops)
603 {
604 basic_block dest;
605 if (bb->loop_father->header == bb)
606 return false;
607 dest = EDGE_SUCC (bb, 0)->dest;
608 if (dest->loop_father->header == dest)
609 return false;
610 }
611
2407343c 612 return true;
532aafad
SB
613}
614
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2;
  edge e;
  edge_iterator ei;

  /* Anything can fall through into the exit block.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fallthrough requires physical adjacency in the block chain.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fallthru edge to exit pins SRC next to the exit block,
     so SRC cannot also fall through to TARGET.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  /* Finally check that no active insn sits between the two blocks.  */
  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}
646
647/* Return nonzero if we could reach target from src by falling through,
648 if the target was made adjacent. If we already have a fall-through
649 edge to the exit block, we can't do that. */
650static bool
651could_fall_through (basic_block src, basic_block target)
652{
653 edge e;
654 edge_iterator ei;
655
fefa31b5 656 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
657 return true;
658 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 659 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad
SB
660 && e->flags & EDGE_FALLTHRU)
661 return 0;
662 return true;
663}
ca6c03ca 664\f
ef2be249
RS
665/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
666rtx
667bb_note (basic_block bb)
668{
669 rtx note;
670
671 note = BB_HEAD (bb);
672 if (LABEL_P (note))
673 note = NEXT_INSN (note);
674
675 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
676 return note;
677}
678
6fb5fa3c
DB
679/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
680 note associated with the BLOCK. */
681
682static rtx
683first_insn_after_basic_block_note (basic_block block)
684{
685 rtx insn;
686
687 /* Get the first instruction in the block. */
688 insn = BB_HEAD (block);
689
690 if (insn == NULL_RTX)
691 return NULL_RTX;
692 if (LABEL_P (insn))
693 insn = NEXT_INSN (insn);
694 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
695
696 return NEXT_INSN (insn);
697}
698
f470c378
ZD
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      /* No split point given: split right after the bb note.  */
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
760
9be94227
EB
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  Stop at the first insn carrying a
     location; END is one before BB_HEAD, i.e. the scan sentinel.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  /* The locus already appears at the end of A, so it is not unique.  */
  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      /* The locus already appears at the start of B.  */
      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
797
798/* If the single edge between blocks A and B is the only place in RTL which
799 holds some unique locus, emit a nop with that locus between the blocks. */
800
801static void
802emit_nop_for_unique_locus_between (basic_block a, basic_block b)
803{
804 if (!unique_locus_on_edge_between_p (a, b))
805 return;
806
807 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5368224f 808 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
9be94227
EB
809}
810
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Peel trailing debug insns off B; they are handled separately so
     B's "real" end is B_END, with debug insns in
     [B_DEBUG_START, B_DEBUG_END].  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
bc35512f 932
6fb5fa3c 933
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
ca6c03ca 972\f
5f0d2358
RK
973/* Return the label in the head of basic block BLOCK. Create one if it doesn't
974 exist. */
ca6c03ca
JH
975
976rtx
d329e058 977block_label (basic_block block)
ca6c03ca 978{
fefa31b5 979 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
ca6c03ca 980 return NULL_RTX;
5f0d2358 981
4b4bf941 982 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 983 {
a813c111 984 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 985 }
5f0d2358 986
a813c111 987 return BB_HEAD (block);
ca6c03ca
JH
988}
989
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  /* The boolean (EDGE_SUCC (src, 0) == e) picks index 1 when E is
	     the first successor and 0 otherwise, i.e. "the other edge".  */
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* The jump must have no side effects beyond the jump itself.  */
  if (!onlyjump_p (insn))
    return NULL;
  /* After reload (or without optimization) tablejumps must be kept,
     since removing them would require rebuilding the dispatch table.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* In cfglayout mode barriers live in the footer chain; strip
	     them but keep any jump table (everything from a label on).  */
	  rtx insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* Redirection may only fail when targeting the exit block.  */
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* Ensure the new unconditional jump is followed by a barrier.  */
      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Hand-splice NEW_INSN out of the chain and back in just
		 before BARRIER.  */
	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The single remaining edge is now taken unconditionally.  */
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1167
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached block OLD (identified by OLD_LABEL).
   Return true if this worked or the original target wasn't OLD,
   return false if redirection doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      /* A tablejump cannot be retargeted at the exit block.  */
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      /* ADDR_VEC keeps its labels in operand 0, ADDR_DIFF_VEC in
	 operand 1 (operand 0 is the base label).  */
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Rewrite every table entry that referenced OLD_LABEL, keeping
	 the label use counts consistent.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: patch every operand label that matches.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      /* Keep JUMP_LABEL and the REG_LABEL_TARGET notes consistent:
	 either the jump's primary label moves to NEW_LABEL, or the new
	 label becomes an additional target recorded via a note.  */
      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Also retarget any REG_LABEL_OPERAND notes still naming the
	 old label.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1282
1283
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure.  E must not be a fallthru edge; the jump at the end of
   E->src is patched to target TARGET instead of E->dest.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Finally update the CFG to match the patched insns.  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1321
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  Keeps EDGE_CROSSING and the REG_CROSSING_JUMP note on the
   source block's jump in sync with the blocks' current partitions.  */

static void
fixup_partition_crossing (edge e)
{
  rtx note;

  /* Edges touching the artificial entry/exit blocks never cross.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
      if (JUMP_P (BB_END (e->src))
	  && !note)
	add_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
    }
  /* NOTE(review): this condition is the exact negation of the one above,
     so a plain `else` would suffice; kept as written.  */
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
      if (note)
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    remove_note (BB_END (e->src), note);
	}
    }
}
1369
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}
1406
bc35512f
JH
1407/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1408 expense of adding new instructions or reordering basic blocks.
1409
1410 Function can be also called with edge destination equivalent to the TARGET.
1411 Then it should try the simplifications and do nothing if none is possible.
1412
6de9cd9a
DN
1413 Return edge representing the branch if transformation succeeded. Return NULL
1414 on failure.
1415 We still return NULL in case E already destinated TARGET and we didn't
1416 managed to simplify instruction stream. */
bc35512f 1417
6de9cd9a 1418static edge
5671bf27 1419rtl_redirect_edge_and_branch (edge e, basic_block target)
bc35512f 1420{
6de9cd9a 1421 edge ret;
f345f21a 1422 basic_block src = e->src;
3371a64f 1423 basic_block dest = e->dest;
f345f21a 1424
bc35512f 1425 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 1426 return NULL;
bc35512f 1427
3371a64f 1428 if (dest == target)
6de9cd9a 1429 return e;
3348b696 1430
6de9cd9a 1431 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
f345f21a 1432 {
6fb5fa3c 1433 df_set_bb_dirty (src);
3371a64f 1434 fixup_partition_crossing (ret);
6de9cd9a 1435 return ret;
f345f21a 1436 }
bc35512f 1437
6de9cd9a
DN
1438 ret = redirect_branch_edge (e, target);
1439 if (!ret)
1440 return NULL;
5f0d2358 1441
6fb5fa3c 1442 df_set_bb_dirty (src);
3371a64f 1443 fixup_partition_crossing (ret);
6de9cd9a 1444 return ret;
ca6c03ca
JH
1445}
1446
3371a64f
TJ
1447/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1448
1449void
1450emit_barrier_after_bb (basic_block bb)
1451{
1452 rtx barrier = emit_barrier_after (BB_END (bb));
c3284718 1453 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
3371a64f
TJ
1454 || current_ir_type () == IR_RTL_CFGLAYOUT);
1455 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1456 BB_FOOTER (bb) = unlink_insn_chain (barrier, barrier);
1457}
1458
/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      /* Split the profile between the redirected branch edge B and the
	 remaining (to-be-fallthru) edge E.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
	  /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  /* NOTE(review): subtracting e->probability from itself always
	     yields 0, which makes the `< 0` clamp below dead; presumably
	     `b->probability` was intended — verify against upstream.  */
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget asm-goto labels that referenced the old fallthru
	     destination.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx insn = BB_END (e->src), note;
	  rtx old_label = BB_HEAD (e->dest);
	  rtx new_label = BB_HEAD (target);

	  /* Keep JUMP_LABEL and the REG_LABEL_TARGET/OPERAND notes
	     consistent with the retargeted labels (same bookkeeping as
	     patch_jump_insn's asm-goto case).  */
	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split counts/frequencies 50/50 between the fallthru path
	     and the extra asm-goto edge.  */
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Jumps to the exit block become (simple_)return insns, as
	 requested by JUMP_LABEL; a target without the matching pattern
	 should never reach here.  */
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1704
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  /* Keep the destination; NULL jump_label means "not a return jump".  */
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
1714
1715/* Redirect edge even at the expense of creating new jump insn or
1716 basic block. Return new basic block if created, NULL otherwise.
41806d92 1717 Conversion must be possible. */
ca6c03ca 1718
9ee634e3 1719static basic_block
d329e058 1720rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1721{
5f0d2358
RK
1722 if (redirect_edge_and_branch (e, target)
1723 || e->dest == target)
ca6c03ca
JH
1724 return NULL;
1725
1726 /* In case the edge redirection failed, try to force it to be non-fallthru
1727 and redirect newly created simplejump. */
6fb5fa3c 1728 df_set_bb_dirty (e->src);
26898771 1729 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1730}
1731
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      /* A real insn in between means B does not fall through to C;
	 leave everything alone.  */
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Back up so the deletion below starts at the jump itself.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
ca6c03ca 1780\f
/* Should move basic block BB after basic block AFTER.  NIY.
   Always reports failure; plain RTL mode cannot reorder blocks
   (cfglayout mode provides the real implementation).  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
1789
/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      /* The first block whose successor-in-chain is in a different
	 partition is the last block of this partition.  */
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* Return bb before the exit block.  */
  /* NOTE(review): relies on the loop leaving BB == the exit block when
     it runs to completion, so bb->prev_bb is the chain's last real bb.  */
  return bb->prev_bb;
}
1804
ca6c03ca 1805/* Split a (typically critical) edge. Return the new block.
41806d92 1806 The edge must not be abnormal.
ca6c03ca
JH
1807
1808 ??? The code generally expects to be called on critical edges.
1809 The case of a block ending in an unconditional jump to a
1810 block with multiple predecessors is not handled optimally. */
1811
8ce33230 1812static basic_block
d329e058 1813rtl_split_edge (edge edge_in)
ca6c03ca 1814{
3371a64f 1815 basic_block bb, new_bb;
ca6c03ca
JH
1816 rtx before;
1817
1818 /* Abnormal edges cannot be split. */
341c100f 1819 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
ca6c03ca
JH
1820
1821 /* We are going to place the new block in front of edge destination.
eaec9b3d 1822 Avoid existence of fallthru predecessors. */
ca6c03ca
JH
1823 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1824 {
0fd4b31d 1825 edge e = find_fallthru_edge (edge_in->dest->preds);
ca6c03ca
JH
1826
1827 if (e)
1828 force_nonfallthru (e);
1829 }
1830
96e82e0a 1831 /* Create the basic block note. */
fefa31b5 1832 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
a813c111 1833 before = BB_HEAD (edge_in->dest);
ca6c03ca
JH
1834 else
1835 before = NULL_RTX;
1836
623a66fa 1837 /* If this is a fall through edge to the exit block, the blocks might be
ffe14686 1838 not adjacent, and the right place is after the source. */
fefa31b5
DM
1839 if ((edge_in->flags & EDGE_FALLTHRU)
1840 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
623a66fa
R
1841 {
1842 before = NEXT_INSN (BB_END (edge_in->src));
623a66fa 1843 bb = create_basic_block (before, NULL, edge_in->src);
076c7ab8 1844 BB_COPY_PARTITION (bb, edge_in->src);
623a66fa
R
1845 }
1846 else
9fb32434 1847 {
fefa31b5 1848 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
3371a64f
TJ
1849 {
1850 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1851 BB_COPY_PARTITION (bb, edge_in->dest);
1852 }
1853 else
1854 {
1855 basic_block after = edge_in->dest->prev_bb;
1856 /* If this is post-bb reordering, and the edge crosses a partition
1857 boundary, the new block needs to be inserted in the bb chain
1858 at the end of the src partition (since we put the new bb into
1859 that partition, see below). Otherwise we may end up creating
1860 an extra partition crossing in the chain, which is illegal.
1861 It can't go after the src, because src may have a fall-through
1862 to a different block. */
1863 if (crtl->bb_reorder_complete
1864 && (edge_in->flags & EDGE_CROSSING))
1865 {
1866 after = last_bb_in_partition (edge_in->src);
1867 before = NEXT_INSN (BB_END (after));
1868 /* The instruction following the last bb in partition should
1869 be a barrier, since it cannot end in a fall-through. */
1870 gcc_checking_assert (BARRIER_P (before));
1871 before = NEXT_INSN (before);
1872 }
1873 bb = create_basic_block (before, NULL, after);
1874 /* Put the split bb into the src partition, to avoid creating
1875 a situation where a cold bb dominates a hot bb, in the case
1876 where src is cold and dest is hot. The src will dominate
1877 the new bb (whereas it might not have dominated dest). */
1878 BB_COPY_PARTITION (bb, edge_in->src);
1879 }
9fb32434 1880 }
ca6c03ca 1881
4977bab6 1882 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
ca6c03ca 1883
3371a64f
TJ
1884 /* Can't allow a region crossing edge to be fallthrough. */
1885 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
fefa31b5 1886 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3371a64f
TJ
1887 {
1888 new_bb = force_nonfallthru (single_succ_edge (bb));
1889 gcc_assert (!new_bb);
1890 }
1891
4d6922ee 1892 /* For non-fallthru edges, we must adjust the predecessor's
ca6c03ca
JH
1893 jump instruction to target our new block. */
1894 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1895 {
341c100f
NS
1896 edge redirected = redirect_edge_and_branch (edge_in, bb);
1897 gcc_assert (redirected);
ca6c03ca
JH
1898 }
1899 else
3b5fda81 1900 {
fefa31b5 1901 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
3b5fda81
JJ
1902 {
1903 /* For asm goto even splitting of fallthru edge might
1904 need insn patching, as other labels might point to the
1905 old label. */
1906 rtx last = BB_END (edge_in->src);
1907 if (last
1908 && JUMP_P (last)
fefa31b5 1909 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3b5fda81
JJ
1910 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1911 && patch_jump_insn (last, before, bb))
1912 df_set_bb_dirty (edge_in->src);
1913 }
1914 redirect_edge_succ (edge_in, bb);
1915 }
ca6c03ca
JH
1916
1917 return bb;
1918}
1919
1920/* Queue instructions for insertion on an edge between two basic blocks.
1921 The new instructions and basic blocks (if any) will not appear in the
1922 CFG until commit_edge_insertions is called. */
1923
1924void
d329e058 1925insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1926{
1927 /* We cannot insert instructions on an abnormal critical edge.
1928 It will be easier to find the culprit if we die now. */
341c100f 1929 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1930
6de9cd9a 1931 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1932 start_sequence ();
1933 else
6de9cd9a 1934 push_to_sequence (e->insns.r);
ca6c03ca
JH
1935
1936 emit_insn (pattern);
1937
6de9cd9a 1938 e->insns.r = get_insns ();
ca6c03ca
JH
1939 end_sequence ();
1940}
1941
1942/* Update the CFG for the instructions queued on edge E. */
1943
4e3825db 1944void
2ac66157 1945commit_one_edge_insertion (edge e)
ca6c03ca
JH
1946{
1947 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
898c90c0 1948 basic_block bb;
ca6c03ca
JH
1949
1950 /* Pull the insns off the edge now since the edge might go away. */
6de9cd9a
DN
1951 insns = e->insns.r;
1952 e->insns.r = NULL_RTX;
ca6c03ca 1953
898c90c0
EB
1954 /* Figure out where to put these insns. If the destination has
1955 one predecessor, insert there. Except for the exit block. */
fefa31b5 1956 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
ca6c03ca 1957 {
898c90c0
EB
1958 bb = e->dest;
1959
1960 /* Get the location correct wrt a code label, and "nice" wrt
1961 a basic block note, and before everything else. */
1962 tmp = BB_HEAD (bb);
1963 if (LABEL_P (tmp))
1964 tmp = NEXT_INSN (tmp);
1965 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1966 tmp = NEXT_INSN (tmp);
1967 if (tmp == BB_HEAD (bb))
1968 before = tmp;
1969 else if (tmp)
1970 after = PREV_INSN (tmp);
1971 else
1972 after = get_last_insn ();
1973 }
3dec4024 1974
898c90c0 1975 /* If the source has one successor and the edge is not abnormal,
a1d50386
JJ
1976 insert there. Except for the entry block.
1977 Don't do this if the predecessor ends in a jump other than
1978 unconditional simple jump. E.g. for asm goto that points all
1979 its labels at the fallthru basic block, we can't insert instructions
1980 before the asm goto, as the asm goto can have various of side effects,
1981 and can't emit instructions after the asm goto, as it must end
1982 the basic block. */
898c90c0
EB
1983 else if ((e->flags & EDGE_ABNORMAL) == 0
1984 && single_succ_p (e->src)
fefa31b5 1985 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a1d50386
JJ
1986 && (!JUMP_P (BB_END (e->src))
1987 || simplejump_p (BB_END (e->src))))
898c90c0
EB
1988 {
1989 bb = e->src;
3dec4024 1990
898c90c0
EB
1991 /* It is possible to have a non-simple jump here. Consider a target
1992 where some forms of unconditional jumps clobber a register. This
1993 happens on the fr30 for example.
ca6c03ca 1994
898c90c0
EB
1995 We know this block has a single successor, so we can just emit
1996 the queued insns before the jump. */
1997 if (JUMP_P (BB_END (bb)))
1998 before = BB_END (bb);
3dec4024
JH
1999 else
2000 {
898c90c0
EB
2001 /* We'd better be fallthru, or we've lost track of what's what. */
2002 gcc_assert (e->flags & EDGE_FALLTHRU);
750054a2 2003
898c90c0 2004 after = BB_END (bb);
ca6c03ca
JH
2005 }
2006 }
2007
898c90c0
EB
2008 /* Otherwise we must split the edge. */
2009 else
2010 {
2011 bb = split_edge (e);
3371a64f
TJ
2012
2013 /* If E crossed a partition boundary, we needed to make bb end in
2014 a region-crossing jump, even though it was originally fallthru. */
2015 if (JUMP_P (BB_END (bb)))
2016 before = BB_END (bb);
2017 else
2018 after = BB_END (bb);
898c90c0
EB
2019 }
2020
2021 /* Now that we've found the spot, do the insertion. */
ca6c03ca
JH
2022 if (before)
2023 {
6fb5fa3c 2024 emit_insn_before_noloc (insns, before, bb);
ca6c03ca
JH
2025 last = prev_nonnote_insn (before);
2026 }
2027 else
6fb5fa3c 2028 last = emit_insn_after_noloc (insns, after, bb);
ca6c03ca
JH
2029
2030 if (returnjump_p (last))
2031 {
2032 /* ??? Remove all outgoing edges from BB and add one for EXIT.
c22cacf3
MS
2033 This is not currently a problem because this only happens
2034 for the (single) epilogue, which already has a fallthru edge
2035 to EXIT. */
ca6c03ca 2036
c5cbcccf 2037 e = single_succ_edge (bb);
fefa31b5 2038 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
c5cbcccf 2039 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
ca6c03ca 2040
5f0d2358 2041 e->flags &= ~EDGE_FALLTHRU;
ca6c03ca 2042 emit_barrier_after (last);
0b17ab2f 2043
ca6c03ca
JH
2044 if (before)
2045 delete_insn (before);
2046 }
341c100f
NS
2047 else
2048 gcc_assert (!JUMP_P (last));
ca6c03ca
JH
2049}
2050
2051/* Update the CFG for all queued instructions. */
2052
2053void
d329e058 2054commit_edge_insertions (void)
ca6c03ca 2055{
ca6c03ca
JH
2056 basic_block bb;
2057
600b5b1d
TJ
2058 /* Optimization passes that invoke this routine can cause hot blocks
2059 previously reached by both hot and cold blocks to become dominated only
2060 by cold blocks. This will cause the verification below to fail,
2061 and lead to now cold code in the hot section. In some cases this
2062 may only be visible after newly unreachable blocks are deleted,
2063 which will be done by fixup_partitions. */
2064 fixup_partitions ();
2065
ca6c03ca
JH
2066#ifdef ENABLE_CHECKING
2067 verify_flow_info ();
2068#endif
2069
fefa31b5
DM
2070 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2071 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
ca6c03ca 2072 {
628f6a4e
BE
2073 edge e;
2074 edge_iterator ei;
ca6c03ca 2075
628f6a4e
BE
2076 FOR_EACH_EDGE (e, ei, bb->succs)
2077 if (e->insns.r)
cf103ca4 2078 commit_one_edge_insertion (e);
3dec4024 2079 }
ca6c03ca
JH
2080}
2081\f
6fb5fa3c 2082
f470c378 2083/* Print out RTL-specific basic block information (live information
a315c44c
SB
2084 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2085 documented in dumpfile.h. */
ca6c03ca 2086
10e9fecc 2087static void
a315c44c 2088rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
ca6c03ca
JH
2089{
2090 rtx insn;
2091 rtx last;
f470c378 2092 char *s_indent;
ca6c03ca 2093
ae50c0cb 2094 s_indent = (char *) alloca ((size_t) indent + 1);
400e39e3 2095 memset (s_indent, ' ', (size_t) indent);
f470c378 2096 s_indent[indent] = '\0';
b8698a0f 2097
a315c44c 2098 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2099 {
2100 df_dump_top (bb, outf);
2101 putc ('\n', outf);
2102 }
ca6c03ca 2103
68cc3174
EB
2104 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2105 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2106 insn = NEXT_INSN (insn))
a315c44c 2107 {
7b19209f
SB
2108 if (flags & TDF_DETAILS)
2109 df_dump_insn_top (insn, outf);
a315c44c
SB
2110 if (! (flags & TDF_SLIM))
2111 print_rtl_single (outf, insn);
2112 else
2113 dump_insn_slim (outf, insn);
7b19209f
SB
2114 if (flags & TDF_DETAILS)
2115 df_dump_insn_bottom (insn, outf);
a315c44c
SB
2116 }
2117
2118 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2119 {
2120 df_dump_bottom (bb, outf);
2121 putc ('\n', outf);
2122 }
2123
ca6c03ca
JH
2124}
2125\f
c4669594
SB
2126/* Like dump_function_to_file, but for RTL. Print out dataflow information
2127 for the start of each basic block. FLAGS are the TDF_* masks documented
2128 in dumpfile.h. */
ca6c03ca
JH
2129
2130void
a315c44c 2131print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
ca6c03ca 2132{
22ea9ec0 2133 const_rtx tmp_rtx;
ca6c03ca
JH
2134 if (rtx_first == 0)
2135 fprintf (outf, "(nil)\n");
2136 else
2137 {
ca6c03ca
JH
2138 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2139 int max_uid = get_max_uid ();
5ed6ace5
MD
2140 basic_block *start = XCNEWVEC (basic_block, max_uid);
2141 basic_block *end = XCNEWVEC (basic_block, max_uid);
2142 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
e0082a72
ZD
2143 basic_block bb;
2144
c4669594
SB
2145 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2146 insns, but the CFG is not maintained so the basic block info
2147 is not reliable. Therefore it's omitted from the dumps. */
2148 if (! (cfun->curr_properties & PROP_cfg))
2149 flags &= ~TDF_BLOCKS;
2150
6fb5fa3c
DB
2151 if (df)
2152 df_dump_start (outf);
2153
c4669594 2154 if (flags & TDF_BLOCKS)
ca6c03ca 2155 {
4f42035e 2156 FOR_EACH_BB_REVERSE_FN (bb, cfun)
ca6c03ca 2157 {
c4669594
SB
2158 rtx x;
2159
2160 start[INSN_UID (BB_HEAD (bb))] = bb;
2161 end[INSN_UID (BB_END (bb))] = bb;
2162 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2163 {
2164 enum bb_state state = IN_MULTIPLE_BB;
5f0d2358 2165
c4669594
SB
2166 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2167 state = IN_ONE_BB;
2168 in_bb_p[INSN_UID (x)] = state;
ca6c03ca 2169
c4669594
SB
2170 if (x == BB_END (bb))
2171 break;
2172 }
ca6c03ca
JH
2173 }
2174 }
2175
2176 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
2177 {
c4669594
SB
2178 if (flags & TDF_BLOCKS)
2179 {
2180 bb = start[INSN_UID (tmp_rtx)];
2181 if (bb != NULL)
2182 {
2183 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
2184 if (df && (flags & TDF_DETAILS))
2185 df_dump_top (bb, outf);
2186 }
ca6c03ca 2187
c4669594
SB
2188 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2189 && !NOTE_P (tmp_rtx)
2190 && !BARRIER_P (tmp_rtx))
2191 fprintf (outf, ";; Insn is not within a basic block\n");
2192 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2193 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2194 }
ca6c03ca 2195
7b19209f
SB
2196 if (flags & TDF_DETAILS)
2197 df_dump_insn_top (tmp_rtx, outf);
a315c44c
SB
2198 if (! (flags & TDF_SLIM))
2199 print_rtl_single (outf, tmp_rtx);
2200 else
2201 dump_insn_slim (outf, tmp_rtx);
7b19209f
SB
2202 if (flags & TDF_DETAILS)
2203 df_dump_insn_bottom (tmp_rtx, outf);
ca6c03ca 2204
c4669594
SB
2205 if (flags & TDF_BLOCKS)
2206 {
2207 bb = end[INSN_UID (tmp_rtx)];
2208 if (bb != NULL)
2209 {
2210 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2211 if (df && (flags & TDF_DETAILS))
2212 df_dump_bottom (bb, outf);
1b03a96d 2213 putc ('\n', outf);
c4669594
SB
2214 }
2215 }
ca6c03ca
JH
2216 }
2217
2218 free (start);
2219 free (end);
2220 free (in_bb_p);
2221 }
ca6c03ca
JH
2222}
2223\f
532aafad
SB
2224/* Update the branch probability of BB if a REG_BR_PROB is present. */
2225
b446e5a2 2226void
d329e058 2227update_br_prob_note (basic_block bb)
b446e5a2
JH
2228{
2229 rtx note;
4b4bf941 2230 if (!JUMP_P (BB_END (bb)))
b446e5a2 2231 return;
a813c111 2232 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
e5af9ddd 2233 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
b446e5a2 2234 return;
e5af9ddd 2235 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
b446e5a2 2236}
96370780
MK
2237
2238/* Get the last insn associated with block BB (that includes barriers and
2239 tablejumps after BB). */
2240rtx
2241get_last_bb_insn (basic_block bb)
2242{
2243 rtx tmp;
2244 rtx end = BB_END (bb);
2245
2246 /* Include any jump table following the basic block. */
2247 if (tablejump_p (end, NULL, &tmp))
2248 end = tmp;
2249
2250 /* Include any barriers that may follow the basic block. */
1e211590 2251 tmp = next_nonnote_insn_bb (end);
96370780
MK
2252 while (tmp && BARRIER_P (tmp))
2253 {
2254 end = tmp;
1e211590 2255 tmp = next_nonnote_insn_bb (end);
96370780
MK
2256 }
2257
2258 return end;
2259}
af205f67 2260
600b5b1d
TJ
2261/* Sanity check partition hotness to ensure that basic blocks in
2262   the cold partition don't dominate basic blocks in the hot partition.
2263 If FLAG_ONLY is true, report violations as errors. Otherwise
2264 re-mark the dominated blocks as cold, since this is run after
2265 cfg optimizations that may make hot blocks previously reached
2266 by both hot and cold blocks now only reachable along cold paths. */
2267
2268static vec<basic_block>
2269find_partition_fixes (bool flag_only)
2270{
2271 basic_block bb;
2272 vec<basic_block> bbs_in_cold_partition = vNULL;
2273 vec<basic_block> bbs_to_fix = vNULL;
2274
2275 /* Callers check this. */
2276 gcc_checking_assert (crtl->has_bb_partition);
2277
11cd3bed 2278 FOR_EACH_BB_FN (bb, cfun)
600b5b1d
TJ
2279 if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
2280 bbs_in_cold_partition.safe_push (bb);
2281
2282 if (bbs_in_cold_partition.is_empty ())
2283 return vNULL;
2284
2285 bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2286
2287 if (dom_calculated_here)
2288 calculate_dominance_info (CDI_DOMINATORS);
2289
2290 while (! bbs_in_cold_partition.is_empty ())
2291 {
2292 bb = bbs_in_cold_partition.pop ();
2293 /* Any blocks dominated by a block in the cold section
2294 must also be cold. */
2295 basic_block son;
2296 for (son = first_dom_son (CDI_DOMINATORS, bb);
2297 son;
2298 son = next_dom_son (CDI_DOMINATORS, son))
2299 {
2300 /* If son is not yet cold, then mark it cold here and
2301 enqueue it for further processing. */
2302 if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2303 {
2304 if (flag_only)
2305 error ("non-cold basic block %d dominated "
2306 "by a block in the cold partition (%d)", son->index, bb->index);
2307 else
2308 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2309 bbs_to_fix.safe_push (son);
2310 bbs_in_cold_partition.safe_push (son);
2311 }
2312 }
2313 }
2314
2315 if (dom_calculated_here)
2316 free_dominance_info (CDI_DOMINATORS);
2317
2318 return bbs_to_fix;
2319}
2320
2321/* Perform cleanup on the hot/cold bb partitioning after optimization
2322 passes that modify the cfg. */
2323
2324void
2325fixup_partitions (void)
2326{
2327 basic_block bb;
2328
2329 if (!crtl->has_bb_partition)
2330 return;
2331
2332 /* Delete any blocks that became unreachable and weren't
2333 already cleaned up, for example during edge forwarding
2334 and convert_jumps_to_returns. This will expose more
2335 opportunities for fixing the partition boundaries here.
2336 Also, the calculation of the dominance graph during verification
2337 will assert if there are unreachable nodes. */
2338 delete_unreachable_blocks ();
2339
2340 /* If there are partitions, do a sanity check on them: A basic block in
2341   a cold partition cannot dominate a basic block in a hot partition.
2342 Fixup any that now violate this requirement, as a result of edge
2343 forwarding and unreachable block deletion.  */
2344 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2345
2346 /* Do the partition fixup after all necessary blocks have been converted to
2347 cold, so that we only update the region crossings the minimum number of
2348 places, which can require forcing edges to be non fallthru. */
2349 while (! bbs_to_fix.is_empty ())
2350 {
2351 bb = bbs_to_fix.pop ();
2352 fixup_new_cold_bb (bb);
2353 }
2354}
2355
af205f67
TJ
2356/* Verify, in the basic block chain, that there is at most one switch
2357 between hot/cold partitions. This condition will not be true until
2358 after reorder_basic_blocks is called. */
2359
251a41b9 2360static int
af205f67
TJ
2361verify_hot_cold_block_grouping (void)
2362{
2363 basic_block bb;
2364 int err = 0;
2365 bool switched_sections = false;
2366 int current_partition = BB_UNPARTITIONED;
2367
3371a64f
TJ
2368 /* Even after bb reordering is complete, we go into cfglayout mode
2369 again (in compgoto). Ensure we don't call this before going back
2370 into linearized RTL when any layout fixes would have been committed. */
2371 if (!crtl->bb_reorder_complete
c3284718 2372 || current_ir_type () != IR_RTL_CFGRTL)
251a41b9 2373 return err;
af205f67 2374
11cd3bed 2375 FOR_EACH_BB_FN (bb, cfun)
af205f67
TJ
2376 {
2377 if (current_partition != BB_UNPARTITIONED
2378 && BB_PARTITION (bb) != current_partition)
2379 {
2380 if (switched_sections)
2381 {
2382 error ("multiple hot/cold transitions found (bb %i)",
2383 bb->index);
2384 err = 1;
2385 }
2386 else
2387 switched_sections = true;
2388
2389 if (!crtl->has_bb_partition)
2390 error ("partition found but function partition flag not set");
2391 }
2392 current_partition = BB_PARTITION (bb);
2393 }
2394
251a41b9 2395 return err;
af205f67 2396}
b446e5a2 2397\f
ca6c03ca 2398
251a41b9
TJ
2399/* Perform several checks on the edges out of each block, such as
2400 the consistency of the branch probabilities, the correctness
2401 of hot/cold partition crossing edges, and the number of expected
600b5b1d
TJ
2402 successor edges. Also verify that the dominance relationship
2403 between hot/cold blocks is sane. */
f470c378 2404
10e9fecc 2405static int
251a41b9 2406rtl_verify_edges (void)
ca6c03ca 2407{
10e9fecc 2408 int err = 0;
94eb5ddb 2409 basic_block bb;
ca6c03ca 2410
4f42035e 2411 FOR_EACH_BB_REVERSE_FN (bb, cfun)
ca6c03ca 2412 {
f99ffaa3
EB
2413 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2414 int n_eh = 0, n_abnormal = 0;
3cf54412 2415 edge e, fallthru = NULL;
628f6a4e 2416 edge_iterator ei;
251a41b9 2417 rtx note;
3371a64f 2418 bool has_crossing_edge = false;
ca6c03ca 2419
2085a21f 2420 if (JUMP_P (BB_END (bb))
a813c111 2421 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
628f6a4e 2422 && EDGE_COUNT (bb->succs) >= 2
a813c111 2423 && any_condjump_p (BB_END (bb)))
5a1a3e5e 2424 {
e5af9ddd 2425 if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
0a6a6ac9 2426 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
5a1a3e5e 2427 {
e5af9ddd
RS
2428 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2429 XINT (note, 0), BRANCH_EDGE (bb)->probability);
5a1a3e5e
JH
2430 err = 1;
2431 }
2432 }
251a41b9 2433
628f6a4e 2434 FOR_EACH_EDGE (e, ei, bb->succs)
ca6c03ca 2435 {
0be7e7a6
RH
2436 bool is_crossing;
2437
ca6c03ca 2438 if (e->flags & EDGE_FALLTHRU)
0be7e7a6
RH
2439 n_fallthru++, fallthru = e;
2440
2441 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
fefa31b5
DM
2442 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2443 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
3371a64f 2444 has_crossing_edge |= is_crossing;
0be7e7a6 2445 if (e->flags & EDGE_CROSSING)
750054a2 2446 {
0be7e7a6
RH
2447 if (!is_crossing)
2448 {
2449 error ("EDGE_CROSSING incorrectly set across same section");
2450 err = 1;
2451 }
2452 if (e->flags & EDGE_FALLTHRU)
2453 {
f99ffaa3 2454 error ("fallthru edge crosses section boundary in bb %i",
750054a2
CT
2455 e->src->index);
2456 err = 1;
2457 }
0be7e7a6
RH
2458 if (e->flags & EDGE_EH)
2459 {
f99ffaa3 2460 error ("EH edge crosses section boundary in bb %i",
0be7e7a6
RH
2461 e->src->index);
2462 err = 1;
2463 }
3371a64f
TJ
2464 if (JUMP_P (BB_END (bb))
2465 && !find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
2466 {
2467 error ("No region crossing jump at section boundary in bb %i",
2468 bb->index);
2469 err = 1;
2470 }
0be7e7a6
RH
2471 }
2472 else if (is_crossing)
2473 {
2474 error ("EDGE_CROSSING missing across section boundary");
2475 err = 1;
750054a2 2476 }
3dec4024 2477
65f43cdf
ZD
2478 if ((e->flags & ~(EDGE_DFS_BACK
2479 | EDGE_CAN_FALLTHRU
2480 | EDGE_IRREDUCIBLE_LOOP
9beb1c84 2481 | EDGE_LOOP_EXIT
7e872b90
JJ
2482 | EDGE_CROSSING
2483 | EDGE_PRESERVE)) == 0)
3dec4024
JH
2484 n_branch++;
2485
2486 if (e->flags & EDGE_ABNORMAL_CALL)
f99ffaa3
EB
2487 n_abnormal_call++;
2488
2489 if (e->flags & EDGE_SIBCALL)
2490 n_sibcall++;
3dec4024
JH
2491
2492 if (e->flags & EDGE_EH)
2493 n_eh++;
f99ffaa3
EB
2494
2495 if (e->flags & EDGE_ABNORMAL)
3dec4024 2496 n_abnormal++;
ca6c03ca 2497 }
5f0d2358 2498
3371a64f
TJ
2499 if (!has_crossing_edge
2500 && find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
2501 {
2502 print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
2503 error ("Region crossing jump across same section in bb %i",
2504 bb->index);
2505 err = 1;
2506 }
2507
1d65f45c 2508 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
3dec4024 2509 {
f99ffaa3 2510 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
3dec4024
JH
2511 err = 1;
2512 }
1d65f45c
RH
2513 if (n_eh > 1)
2514 {
f99ffaa3 2515 error ("too many exception handling edges in bb %i", bb->index);
1d65f45c
RH
2516 err = 1;
2517 }
3dec4024 2518 if (n_branch
4b4bf941 2519 && (!JUMP_P (BB_END (bb))
a813c111
SB
2520 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2521 || any_condjump_p (BB_END (bb))))))
3dec4024 2522 {
ab532386 2523 error ("too many outgoing branch edges from bb %i", bb->index);
3dec4024
JH
2524 err = 1;
2525 }
a813c111 2526 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
3dec4024 2527 {
f99ffaa3 2528 error ("fallthru edge after unconditional jump in bb %i", bb->index);
3dec4024
JH
2529 err = 1;
2530 }
a813c111 2531 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
3dec4024 2532 {
f99ffaa3
EB
2533 error ("wrong number of branch edges after unconditional jump"
2534 " in bb %i", bb->index);
3dec4024
JH
2535 err = 1;
2536 }
a813c111 2537 if (n_branch != 1 && any_condjump_p (BB_END (bb))
c11fd0b2 2538 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
3dec4024 2539 {
f99ffaa3
EB
2540 error ("wrong amount of branch edges after conditional jump"
2541 " in bb %i", bb->index);
2542 err = 1;
2543 }
2544 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2545 {
2546 error ("abnormal call edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2547 err = 1;
2548 }
f99ffaa3 2549 if (n_sibcall && !CALL_P (BB_END (bb)))
3dec4024 2550 {
f99ffaa3 2551 error ("sibcall edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2552 err = 1;
2553 }
f99ffaa3
EB
2554 if (n_abnormal > n_eh
2555 && !(CALL_P (BB_END (bb))
2556 && n_abnormal == n_abnormal_call + n_sibcall)
4b4bf941 2557 && (!JUMP_P (BB_END (bb))
a813c111
SB
2558 || any_condjump_p (BB_END (bb))
2559 || any_uncondjump_p (BB_END (bb))))
3dec4024 2560 {
ab532386 2561 error ("abnormal edges for no purpose in bb %i", bb->index);
3dec4024
JH
2562 err = 1;
2563 }
251a41b9 2564 }
f87c27b4 2565
600b5b1d
TJ
2566 /* If there are partitions, do a sanity check on them: A basic block in
2567   a cold partition cannot dominate a basic block in a hot partition.  */
2568 if (crtl->has_bb_partition && !err)
2569 {
2570 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2571 err = !bbs_to_fix.is_empty ();
2572 }
2573
251a41b9
TJ
2574 /* Clean up. */
2575 return err;
2576}
5f0d2358 2577
251a41b9
TJ
2578/* Checks on the instructions within blocks. Currently checks that each
2579 block starts with a basic block note, and that basic block notes and
2580 control flow jumps are not found in the middle of the block. */
ca6c03ca 2581
251a41b9
TJ
2582static int
2583rtl_verify_bb_insns (void)
2584{
2585 rtx x;
2586 int err = 0;
2587 basic_block bb;
2588
4f42035e 2589 FOR_EACH_BB_REVERSE_FN (bb, cfun)
251a41b9
TJ
2590 {
2591 /* Now check the header of basic
c22cacf3 2592 block. It ought to contain optional CODE_LABEL followed
ca6c03ca 2593 by NOTE_BASIC_BLOCK. */
a813c111 2594 x = BB_HEAD (bb);
4b4bf941 2595 if (LABEL_P (x))
ca6c03ca 2596 {
a813c111 2597 if (BB_END (bb) == x)
ca6c03ca
JH
2598 {
2599 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
0b17ab2f 2600 bb->index);
ca6c03ca
JH
2601 err = 1;
2602 }
5f0d2358 2603
ca6c03ca
JH
2604 x = NEXT_INSN (x);
2605 }
5f0d2358 2606
ca6c03ca
JH
2607 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2608 {
2609 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
0b17ab2f 2610 bb->index);
ca6c03ca
JH
2611 err = 1;
2612 }
2613
a813c111 2614 if (BB_END (bb) == x)
49243025 2615 /* Do checks for empty blocks here. */
5f0d2358 2616 ;
ca6c03ca 2617 else
5f0d2358
RK
2618 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2619 {
2620 if (NOTE_INSN_BASIC_BLOCK_P (x))
2621 {
2622 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
0b17ab2f 2623 INSN_UID (x), bb->index);
5f0d2358
RK
2624 err = 1;
2625 }
2626
a813c111 2627 if (x == BB_END (bb))
5f0d2358 2628 break;
ca6c03ca 2629
83fd323c 2630 if (control_flow_insn_p (x))
5f0d2358 2631 {
0b17ab2f 2632 error ("in basic block %d:", bb->index);
5f0d2358
RK
2633 fatal_insn ("flow control insn inside a basic block", x);
2634 }
2635 }
ca6c03ca
JH
2636 }
2637
251a41b9
TJ
2638 /* Clean up. */
2639 return err;
2640}
2641
2642/* Verify that block pointers for instructions in basic blocks, headers and
2643 footers are set appropriately. */
2644
2645static int
2646rtl_verify_bb_pointers (void)
2647{
2648 int err = 0;
2649 basic_block bb;
2650
2651 /* Check the general integrity of the basic blocks. */
4f42035e 2652 FOR_EACH_BB_REVERSE_FN (bb, cfun)
251a41b9
TJ
2653 {
2654 rtx insn;
2655
2656 if (!(bb->flags & BB_RTL))
2657 {
2658 error ("BB_RTL flag not set for block %d", bb->index);
2659 err = 1;
2660 }
2661
2662 FOR_BB_INSNS (bb, insn)
2663 if (BLOCK_FOR_INSN (insn) != bb)
2664 {
2665 error ("insn %d basic block pointer is %d, should be %d",
2666 INSN_UID (insn),
2667 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2668 bb->index);
2669 err = 1;
2670 }
2671
2672 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2673 if (!BARRIER_P (insn)
2674 && BLOCK_FOR_INSN (insn) != NULL)
2675 {
2676 error ("insn %d in header of bb %d has non-NULL basic block",
2677 INSN_UID (insn), bb->index);
2678 err = 1;
2679 }
2680 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2681 if (!BARRIER_P (insn)
2682 && BLOCK_FOR_INSN (insn) != NULL)
2683 {
2684 error ("insn %d in footer of bb %d has non-NULL basic block",
2685 INSN_UID (insn), bb->index);
2686 err = 1;
2687 }
2688 }
af205f67 2689
10e9fecc 2690 /* Clean up. */
10e9fecc
JH
2691 return err;
2692}
5f0d2358 2693
10e9fecc
JH
2694/* Verify the CFG and RTL consistency common for both underlying RTL and
2695 cfglayout RTL.
5f0d2358 2696
10e9fecc 2697 Currently it does following checks:
251a41b9
TJ
2698
2699 - overlapping of basic blocks
2700 - insns with wrong BLOCK_FOR_INSN pointers
2701 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2702 - tails of basic blocks (ensure that boundary is necessary)
2703 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2704 and NOTE_INSN_BASIC_BLOCK
2705 - verify that no fall_thru edge crosses hot/cold partition boundaries
2706 - verify that there are no pending RTL branch predictions
600b5b1d 2707 - verify that hot blocks are not dominated by cold blocks
251a41b9
TJ
2708
2709 In future it can be extended check a lot of other stuff as well
2710 (reachability of basic blocks, life information, etc. etc.). */
9eab6785 2711
10e9fecc 2712static int
251a41b9
TJ
2713rtl_verify_flow_info_1 (void)
2714{
2715 int err = 0;
2716
2717 err |= rtl_verify_bb_pointers ();
2718
2719 err |= rtl_verify_bb_insns ();
2720
2721 err |= rtl_verify_edges ();
2722
251a41b9
TJ
2723 return err;
2724}
2725
2726/* Walk the instruction chain and verify that bb head/end pointers
2727 are correct, and that instructions are in exactly one bb and have
2728 correct block pointers. */
2729
2730static int
2731rtl_verify_bb_insn_chain (void)
10e9fecc
JH
2732{
2733 basic_block bb;
251a41b9 2734 int err = 0;
10e9fecc 2735 rtx x;
9eab6785
SB
2736 rtx last_head = get_last_insn ();
2737 basic_block *bb_info;
9eab6785
SB
2738 const int max_uid = get_max_uid ();
2739
2740 bb_info = XCNEWVEC (basic_block, max_uid);
ca6c03ca 2741
4f42035e 2742 FOR_EACH_BB_REVERSE_FN (bb, cfun)
10e9fecc 2743 {
9eab6785
SB
2744 rtx head = BB_HEAD (bb);
2745 rtx end = BB_END (bb);
628f6a4e 2746
9eab6785 2747 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
a7b87f73
ZD
2748 {
2749 /* Verify the end of the basic block is in the INSN chain. */
2750 if (x == end)
2751 break;
2752
251a41b9
TJ
2753 /* And that the code outside of basic blocks has NULL bb field. */
2754 if (!BARRIER_P (x)
2755 && BLOCK_FOR_INSN (x) != NULL)
2756 {
2757 error ("insn %d outside of basic blocks has non-NULL bb field",
2758 INSN_UID (x));
2759 err = 1;
2760 }
a7b87f73 2761 }
9eab6785
SB
2762
2763 if (!x)
2764 {
2765 error ("end insn %d for block %d not found in the insn stream",
2766 INSN_UID (end), bb->index);
2767 err = 1;
2768 }
2769
2770 /* Work backwards from the end to the head of the basic block
2771 to verify the head is in the RTL chain. */
2772 for (; x != NULL_RTX; x = PREV_INSN (x))
a00d11f0 2773 {
9eab6785
SB
2774 /* While walking over the insn chain, verify insns appear
2775 in only one basic block. */
2776 if (bb_info[INSN_UID (x)] != NULL)
2777 {
2778 error ("insn %d is in multiple basic blocks (%d and %d)",
2779 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2780 err = 1;
2781 }
2782
2783 bb_info[INSN_UID (x)] = bb;
2784
2785 if (x == head)
2786 break;
2787 }
2788 if (!x)
2789 {
2790 error ("head insn %d for block %d not found in the insn stream",
2791 INSN_UID (head), bb->index);
a00d11f0
JH
2792 err = 1;
2793 }
2794
a7b87f73 2795 last_head = PREV_INSN (x);
251a41b9
TJ
2796 }
2797
2798 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2799 {
2800 /* Check that the code before the first basic block has NULL
2801 bb field. */
2802 if (!BARRIER_P (x)
2803 && BLOCK_FOR_INSN (x) != NULL)
2804 {
2805 error ("insn %d outside of basic blocks has non-NULL bb field",
2806 INSN_UID (x));
2807 err = 1;
2808 }
2809 }
2810 free (bb_info);
2811
2812 return err;
2813}
2814
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.

   Returns 1 if any inconsistency was found (after reporting it with
   error ()), 0 otherwise.  May abort via fatal_insn for a wrong insn
   sitting inside a fallthru gap.  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.
	     Walk forward from the end of BB; hitting the next block's
	     NOTE_INSN_BASIC_BLOCK (or the end of the chain) before a
	     barrier means the barrier is missing.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx insn;

	  /* A fallthru edge between two real blocks requires them to be
	     adjacent in layout order...  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* ... and the gap between them may contain only notes and
	       debug insns -- no barriers and no real insns.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
2873
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.

   Returns ERR (only ever 0 here); hard failures are reported through
   internal_error / fatal_insn, which do not return.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  /* CURR_BB is the block we are inside of while scanning, or NULL when
     between blocks (after BB_END and before the next bb note).  */
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  /* Between blocks only barriers, notes and jump-table payloads
	     (ADDR_VEC preceded by its label) are legitimate.  */
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* A non-conditional return must be immediately followed by a
	 barrier (ignoring notes).  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  /* Every real block must have exactly one bb note.  */
  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

   return err;
}
251a41b9
TJ
2942
2943/* Verify the CFG and RTL consistency common for both underlying RTL and
2944 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2945
2946 Currently it does following checks:
2947 - all checks of rtl_verify_flow_info_1
2948 - test head/end pointers
2949 - check that blocks are laid out in consecutive order
2950 - check that all insns are in the basic blocks
2951 (except the switch handling code, barriers and notes)
2952 - check that all returns are followed by barriers
600b5b1d
TJ
2953 - check that all fallthru edge points to the adjacent blocks
2954 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
2955
2956static int
2957rtl_verify_flow_info (void)
2958{
2959 int err = 0;
2960
2961 err |= rtl_verify_flow_info_1 ();
2962
2963 err |= rtl_verify_bb_insn_chain ();
2964
2965 err |= rtl_verify_fallthru ();
2966
2967 err |= rtl_verify_bb_layout ();
2968
3371a64f
TJ
2969 err |= verify_hot_cold_block_grouping ();
2970
251a41b9
TJ
2971 return err;
2972}
ca6c03ca 2973\f
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Back up over trailing debug insns and notes so the decisions below
     are based on the last real insn of the block.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = XINT (note, 0);
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  /* Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
3194
5f0d2358
RK
3195/* Search all basic blocks for potentially dead edges and purge them. Return
3196 true if some edge has been eliminated. */
ca6c03ca
JH
3197
3198bool
25cd19de 3199purge_all_dead_edges (void)
ca6c03ca 3200{
e0082a72 3201 int purged = false;
e0082a72 3202 basic_block bb;
473fb060 3203
11cd3bed 3204 FOR_EACH_BB_FN (bb, cfun)
473fb060 3205 {
e0082a72 3206 bool purged_here = purge_dead_edges (bb);
5f0d2358 3207
473fb060 3208 purged |= purged_here;
473fb060 3209 }
5f0d2358 3210
ca6c03ca
JH
3211 return purged;
3212}
9ee634e3 3213
ba5e9aca
EB
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act if the abnormal-edge source insn is no longer the last
	 insn of the block (a pass appended insns after it).  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/trapping insn and move
		 everything after it onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
78bde837
SB
3302\f
3303/* Cut the insns from FIRST to LAST out of the insns stream. */
3304
3305rtx
3306unlink_insn_chain (rtx first, rtx last)
3307{
3308 rtx prevfirst = PREV_INSN (first);
3309 rtx nextlast = NEXT_INSN (last);
3310
3311 PREV_INSN (first) = NULL;
3312 NEXT_INSN (last) = NULL;
3313 if (prevfirst)
3314 NEXT_INSN (prevfirst) = nextlast;
3315 if (nextlast)
3316 PREV_INSN (nextlast) = prevfirst;
3317 else
3318 set_last_insn (prevfirst);
3319 if (!prevfirst)
3320 set_first_insn (nextlast);
3321 return first;
3322}
3323\f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* NEXT_HEAD bounds the scan: never walk into the next block.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers belong to the preceding block.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Skip over other notes without claiming them.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by jump-table data is the table of a
	     tablejump ending BB; both belong to BB.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Push the stray note past the claimed insns.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
3407
3408/* Locate or create a label for a given basic block. */
3409
3410static rtx
3411label_for_bb (basic_block bb)
3412{
3413 rtx label = BB_HEAD (bb);
3414
3415 if (!LABEL_P (label))
3416 {
3417 if (dump_file)
3418 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3419
3420 label = block_label (bb);
3421 }
3422
3423 return label;
3424}
3425
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.

   Moves insns outside any block into cfg_layout_function_header (before
   the first block), per-block BB_HEADER/BB_FOOTER fields, and
   cfg_layout_function_footer (after the last block), unlinking them
   from the main insn stream.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first bb note; everything before it is function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  /* NEXT_INSN tracks the first insn not yet assigned to a block.  */
  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx end;

      /* Insns between the previous block and this one become BB's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers/jump tables associated with BB become its
	 footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3469\f
/* Pass execute hook: switch the current function into cfglayout mode.
   Always returns 0 (no TODO flags).  */

static unsigned int
into_cfg_layout_mode (void)
{
  /* 0 = no extra cleanup flags requested.  */
  cfg_layout_initialize (0);
  return 0;
}
3476
/* Pass execute hook: leave cfglayout mode, linearizing the blocks in
   their current next_bb order.  Always returns 0 (no TODO flags).  */

static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  /* Record the desired block order in the aux chain, which
     cfg_layout_finalize consumes; the block just before the exit
     block terminates the chain with its NULL aux.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
3490
27a4cd48
DM
namespace {

/* Pass metadata for the "into_cfglayout" RTL pass, which provides the
   PROP_cfglayout property.  */
const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper delegating to into_cfg_layout_mode above.  */
class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return into_cfg_layout_mode (); }

}; // class pass_into_cfg_layout_mode

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3527
namespace {

/* Pass metadata for the "outof_cfglayout" RTL pass, which destroys the
   PROP_cfglayout property.  */
const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper delegating to outof_cfg_layout_mode above.  */
class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return outof_cfg_layout_mode (); }

}; // class pass_outof_cfg_layout_mode

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
78bde837
SB
3564\f
3565
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb to follow the aux
     chain starting after the entry block.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3639\f
3640
/* Given a reorder chain, rearrange the code to match.

   The desired block order is given by the bb->aux chain.  First the
   insn stream is physically re-chained (headers, block bodies,
   footers), then jumps are inverted or added so that every fallthru
   edge really falls through, and finally goto_locus information is
   materialized for -O0 debugging.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
       bb->aux)
    {
      /* Splice BB_HEADER, then the block body, then BB_FOOTER onto the
	 growing stream; INSN tracks the last insn emitted so far.  */
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
       bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && XINT (note, 0) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest
				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest
				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  /* Thread the new forwarder block into the aux chain right
	     after BB.  */
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB_FN (bb, cfun)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* If the source block already ends with an insn carrying
		 the edge's locus, nothing to materialize.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      /* A location-less simplejump can simply adopt the locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* If the destination already starts with an insn at
		     the right locus, nothing to do either.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and plant a (possibly nop) insn
		 carrying the locus in the new block.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
3936\f
3937/* Perform sanity checks on the insn chain.
3938 1. Check that next/prev pointers are consistent in both the forward and
3939 reverse direction.
3940 2. Count insns in chain, going both directions, and check if equal.
3941 3. Check that get_last_insn () returns the actual end of chain. */
3942
3943DEBUG_FUNCTION void
3944verify_insn_chain (void)
3945{
3946 rtx x, prevx, nextx;
3947 int insn_cnt1, insn_cnt2;
3948
3949 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3950 x != 0;
3951 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3952 gcc_assert (PREV_INSN (x) == prevx);
3953
3954 gcc_assert (prevx == get_last_insn ());
3955
3956 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3957 x != 0;
3958 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3959 gcc_assert (NEXT_INSN (x) == nextx);
3960
3961 gcc_assert (insn_cnt1 == insn_cnt2);
3962}
3963\f
3964/* If we have assembler epilogues, the block falling through to exit must
3965 be the last one in the reordered chain when we reach final. Ensure
3966 that this condition is met. */
3967static void
3968fixup_fallthru_exit_predecessor (void)
3969{
3970 edge e;
3971 basic_block bb = NULL;
3972
3973 /* This transformation is not valid before reload, because we might
3974 separate a call from the instruction that copies the return
3975 value. */
3976 gcc_assert (reload_completed);
3977
fefa31b5 3978 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
78bde837
SB
3979 if (e)
3980 bb = e->src;
3981
3982 if (bb && bb->aux)
3983 {
fefa31b5 3984 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
78bde837
SB
3985
3986 /* If the very first block is the one with the fall-through exit
3987 edge, we have to split that block. */
3988 if (c == bb)
3989 {
3990 bb = split_block (bb, NULL)->dest;
3991 bb->aux = c->aux;
3992 c->aux = bb;
3993 BB_FOOTER (bb) = BB_FOOTER (c);
3994 BB_FOOTER (c) = NULL;
3995 }
3996
3997 while (c->aux != bb)
3998 c = (basic_block) c->aux;
3999
4000 c->aux = bb->aux;
4001 while (c->aux)
4002 c = (basic_block) c->aux;
4003
4004 c->aux = bb;
4005 bb->aux = NULL;
4006 }
4007}
4008
4009/* In case there are more than one fallthru predecessors of exit, force that
4010 there is only one. */
4011
4012static void
4013force_one_exit_fallthru (void)
4014{
4015 edge e, predecessor = NULL;
4016 bool more = false;
4017 edge_iterator ei;
4018 basic_block forwarder, bb;
4019
fefa31b5 4020 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
78bde837
SB
4021 if (e->flags & EDGE_FALLTHRU)
4022 {
4023 if (predecessor == NULL)
4024 predecessor = e;
4025 else
4026 {
4027 more = true;
4028 break;
4029 }
4030 }
4031
4032 if (!more)
4033 return;
4034
4035 /* Exit has several fallthru predecessors. Create a forwarder block for
4036 them. */
4037 forwarder = split_edge (predecessor);
fefa31b5
DM
4038 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4039 (e = ei_safe_edge (ei)); )
78bde837
SB
4040 {
4041 if (e->src == forwarder
4042 || !(e->flags & EDGE_FALLTHRU))
4043 ei_next (&ei);
4044 else
4045 redirect_edge_and_branch_force (e, forwarder);
4046 }
4047
4048 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4049 exit block. */
11cd3bed 4050 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4051 {
4052 if (bb->aux == NULL && bb != forwarder)
4053 {
4054 bb->aux = forwarder;
4055 break;
4056 }
4057 }
4058}
4059\f
4060/* Return true in case it is possible to duplicate the basic block BB. */
4061
4062static bool
4063cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4064{
4065 /* Do not attempt to duplicate tablejumps, as we need to unshare
4066 the dispatch table. This is difficult to do, as the instructions
4067 computing jump destination may be hoisted outside the basic block. */
4068 if (tablejump_p (BB_END (bb), NULL, NULL))
4069 return false;
4070
4071 /* Do not duplicate blocks containing insns that can't be copied. */
4072 if (targetm.cannot_copy_insn_p)
4073 {
4074 rtx insn = BB_HEAD (bb);
4075 while (1)
4076 {
4077 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4078 return false;
4079 if (insn == BB_END (bb))
4080 break;
4081 insn = NEXT_INSN (insn);
4082 }
4083 }
4084
4085 return true;
4086}
4087
4088rtx
4089duplicate_insn_chain (rtx from, rtx to)
4090{
39718607 4091 rtx insn, next, last, copy;
78bde837
SB
4092
4093 /* Avoid updating of boundaries of previous basic block. The
4094 note will get removed from insn stream in fixup. */
4095 last = emit_note (NOTE_INSN_DELETED);
4096
4097 /* Create copy at the end of INSN chain. The chain will
4098 be reordered later. */
4099 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4100 {
4101 switch (GET_CODE (insn))
4102 {
4103 case DEBUG_INSN:
4104 /* Don't duplicate label debug insns. */
4105 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4106 break;
4107 /* FALLTHRU */
4108 case INSN:
4109 case CALL_INSN:
4110 case JUMP_INSN:
78bde837
SB
4111 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4112 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4113 && ANY_RETURN_P (JUMP_LABEL (insn)))
4114 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4115 maybe_copy_prologue_epilogue_insn (insn, copy);
4116 break;
4117
39718607
SB
4118 case JUMP_TABLE_DATA:
4119 /* Avoid copying of dispatch tables. We never duplicate
4120 tablejumps, so this can hit only in case the table got
4121 moved far from original jump.
4122 Avoid copying following barrier as well if any
4123 (and debug insns in between). */
4124 for (next = NEXT_INSN (insn);
4125 next != NEXT_INSN (to);
4126 next = NEXT_INSN (next))
4127 if (!DEBUG_INSN_P (next))
4128 break;
4129 if (next != NEXT_INSN (to) && BARRIER_P (next))
4130 insn = next;
4131 break;
4132
78bde837
SB
4133 case CODE_LABEL:
4134 break;
4135
4136 case BARRIER:
4137 emit_barrier ();
4138 break;
4139
4140 case NOTE:
4141 switch (NOTE_KIND (insn))
4142 {
4143 /* In case prologue is empty and function contain label
4144 in first BB, we may want to copy the block. */
4145 case NOTE_INSN_PROLOGUE_END:
4146
4147 case NOTE_INSN_DELETED:
4148 case NOTE_INSN_DELETED_LABEL:
4149 case NOTE_INSN_DELETED_DEBUG_LABEL:
4150 /* No problem to strip these. */
4151 case NOTE_INSN_FUNCTION_BEG:
4152 /* There is always just single entry to function. */
4153 case NOTE_INSN_BASIC_BLOCK:
3371a64f
TJ
4154 /* We should only switch text sections once. */
4155 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
78bde837
SB
4156 break;
4157
4158 case NOTE_INSN_EPILOGUE_BEG:
78bde837
SB
4159 emit_note_copy (insn);
4160 break;
4161
4162 default:
4163 /* All other notes should have already been eliminated. */
4164 gcc_unreachable ();
4165 }
4166 break;
4167 default:
4168 gcc_unreachable ();
4169 }
4170 }
4171 insn = NEXT_INSN (last);
4172 delete_insn (last);
4173 return insn;
4174}
4175
4176/* Create a duplicate of the basic block BB. */
4177
4178static basic_block
4179cfg_layout_duplicate_bb (basic_block bb)
4180{
4181 rtx insn;
4182 basic_block new_bb;
4183
4184 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4185 new_bb = create_basic_block (insn,
4186 insn ? get_last_insn () : NULL,
fefa31b5 4187 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
78bde837
SB
4188
4189 BB_COPY_PARTITION (new_bb, bb);
4190 if (BB_HEADER (bb))
4191 {
4192 insn = BB_HEADER (bb);
4193 while (NEXT_INSN (insn))
4194 insn = NEXT_INSN (insn);
4195 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4196 if (insn)
4197 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4198 }
4199
4200 if (BB_FOOTER (bb))
4201 {
4202 insn = BB_FOOTER (bb);
4203 while (NEXT_INSN (insn))
4204 insn = NEXT_INSN (insn);
4205 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4206 if (insn)
4207 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4208 }
4209
4210 return new_bb;
4211}
4212
4213\f
4214/* Main entry point to this module - initialize the datastructures for
4215 CFG layout changes. It keeps LOOPS up-to-date if not null.
4216
4217 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4218
4219void
4220cfg_layout_initialize (unsigned int flags)
4221{
4222 rtx x;
4223 basic_block bb;
4224
4ca19309
TJ
4225 /* Once bb reordering is complete, cfg layout mode should not be re-entered.
4226 Entering cfg layout mode will perform optimizations on the cfg that
4227 could affect the bb layout negatively or even require fixups. An
4228 example of the latter is if edge forwarding performed when optimizing
4229 the cfg layout required moving a block from the hot to the cold section
4230 under -freorder-blocks-and-partition. This would create an illegal
4231 partitioning unless some manual fixup was performed. */
4232 gcc_assert (!crtl->bb_reorder_complete);
4233
78bde837
SB
4234 initialize_original_copy_tables ();
4235
4236 cfg_layout_rtl_register_cfg_hooks ();
4237
4238 record_effective_endpoints ();
4239
4240 /* Make sure that the targets of non local gotos are marked. */
4241 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
4242 {
4243 bb = BLOCK_FOR_INSN (XEXP (x, 0));
4244 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4245 }
4246
4247 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4248}
4249
4250/* Splits superblocks. */
4251void
4252break_superblocks (void)
4253{
4254 sbitmap superblocks;
4255 bool need = false;
4256 basic_block bb;
4257
8b1c6fd7 4258 superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 4259 bitmap_clear (superblocks);
78bde837 4260
11cd3bed 4261 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4262 if (bb->flags & BB_SUPERBLOCK)
4263 {
4264 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4265 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4266 need = true;
4267 }
4268
4269 if (need)
4270 {
4271 rebuild_jump_labels (get_insns ());
4272 find_many_sub_basic_blocks (superblocks);
4273 }
4274
4275 free (superblocks);
4276}
4277
4278/* Finalize the changes: reorder insn list according to the sequence specified
29f3fd5b 4279 by aux pointers, enter compensation code, rebuild scope forest. */
78bde837
SB
4280
4281void
4282cfg_layout_finalize (void)
4283{
4284#ifdef ENABLE_CHECKING
4285 verify_flow_info ();
4286#endif
4287 force_one_exit_fallthru ();
4288 rtl_register_cfg_hooks ();
4289 if (reload_completed
4290#ifdef HAVE_epilogue
4291 && !HAVE_epilogue
4292#endif
4293 )
4294 fixup_fallthru_exit_predecessor ();
4295 fixup_reorder_chain ();
4296
29f3fd5b
SB
4297 rebuild_jump_labels (get_insns ());
4298 delete_dead_jumptables ();
4299
78bde837
SB
4300#ifdef ENABLE_CHECKING
4301 verify_insn_chain ();
4302 verify_flow_info ();
4303#endif
4304}
4305
ba5e9aca 4306
9ee634e3 4307/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4308
4309static basic_block
d329e058 4310cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4311{
ae50c0cb 4312 rtx insn = (rtx) insnp;
f470c378 4313 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4314
bcc708fc
MM
4315 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4316 BB_FOOTER (bb) = NULL;
9ee634e3 4317
f470c378 4318 return new_bb;
9ee634e3
JH
4319}
4320
9ee634e3 4321/* Redirect Edge to DEST. */
6de9cd9a 4322static edge
d329e058 4323cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
9ee634e3
JH
4324{
4325 basic_block src = e->src;
6de9cd9a 4326 edge ret;
9ee634e3 4327
bc35512f 4328 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 4329 return NULL;
bc35512f 4330
3348b696 4331 if (e->dest == dest)
6de9cd9a 4332 return e;
bc35512f 4333
fefa31b5 4334 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
6de9cd9a 4335 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
f345f21a 4336 {
6fb5fa3c 4337 df_set_bb_dirty (src);
6de9cd9a 4338 return ret;
f345f21a 4339 }
bc35512f 4340
fefa31b5 4341 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
4342 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4343 {
c263766c
RH
4344 if (dump_file)
4345 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
bc35512f
JH
4346 e->src->index, dest->index);
4347
6fb5fa3c 4348 df_set_bb_dirty (e->src);
bc35512f 4349 redirect_edge_succ (e, dest);
6de9cd9a 4350 return e;
bc35512f
JH
4351 }
4352
9ee634e3
JH
4353 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4354 in the case the basic block appears to be in sequence. Avoid this
4355 transformation. */
4356
9ee634e3
JH
4357 if (e->flags & EDGE_FALLTHRU)
4358 {
4359 /* Redirect any branch edges unified with the fallthru one. */
4b4bf941 4360 if (JUMP_P (BB_END (src))
432f982f
JH
4361 && label_is_jump_target_p (BB_HEAD (e->dest),
4362 BB_END (src)))
9ee634e3 4363 {
341c100f 4364 edge redirected;
c22cacf3 4365
c263766c
RH
4366 if (dump_file)
4367 fprintf (dump_file, "Fallthru edge unified with branch "
432f982f
JH
4368 "%i->%i redirected to %i\n",
4369 e->src->index, e->dest->index, dest->index);
4370 e->flags &= ~EDGE_FALLTHRU;
341c100f
NS
4371 redirected = redirect_branch_edge (e, dest);
4372 gcc_assert (redirected);
0c617be4
JL
4373 redirected->flags |= EDGE_FALLTHRU;
4374 df_set_bb_dirty (redirected->src);
4375 return redirected;
9ee634e3
JH
4376 }
4377 /* In case we are redirecting fallthru edge to the branch edge
c22cacf3 4378 of conditional jump, remove it. */
628f6a4e 4379 if (EDGE_COUNT (src->succs) == 2)
9ee634e3 4380 {
03101c6f
KH
4381 /* Find the edge that is different from E. */
4382 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
628f6a4e 4383
9ee634e3 4384 if (s->dest == dest
a813c111
SB
4385 && any_condjump_p (BB_END (src))
4386 && onlyjump_p (BB_END (src)))
4387 delete_insn (BB_END (src));
9ee634e3 4388 }
c263766c 4389 if (dump_file)
dc764d10 4390 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
bc35512f 4391 e->src->index, e->dest->index, dest->index);
0c617be4 4392 ret = redirect_edge_succ_nodup (e, dest);
9ee634e3
JH
4393 }
4394 else
bc35512f 4395 ret = redirect_branch_edge (e, dest);
9ee634e3
JH
4396
4397 /* We don't want simplejumps in the insn stream during cfglayout. */
341c100f 4398 gcc_assert (!simplejump_p (BB_END (src)));
9ee634e3 4399
6fb5fa3c 4400 df_set_bb_dirty (src);
9ee634e3
JH
4401 return ret;
4402}
4403
4404/* Simple wrapper as we always can redirect fallthru edges. */
4405static basic_block
d329e058 4406cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4407{
341c100f
NS
4408 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4409
4410 gcc_assert (redirected);
9ee634e3
JH
4411 return NULL;
4412}
4413
f470c378
ZD
4414/* Same as delete_basic_block but update cfg_layout structures. */
4415
9ee634e3 4416static void
d329e058 4417cfg_layout_delete_block (basic_block bb)
9ee634e3 4418{
a813c111 4419 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
9ee634e3 4420
bcc708fc 4421 if (BB_HEADER (bb))
9ee634e3 4422 {
a813c111 4423 next = BB_HEAD (bb);
9ee634e3 4424 if (prev)
bcc708fc 4425 NEXT_INSN (prev) = BB_HEADER (bb);
9ee634e3 4426 else
bcc708fc
MM
4427 set_first_insn (BB_HEADER (bb));
4428 PREV_INSN (BB_HEADER (bb)) = prev;
4429 insn = BB_HEADER (bb);
9ee634e3
JH
4430 while (NEXT_INSN (insn))
4431 insn = NEXT_INSN (insn);
4432 NEXT_INSN (insn) = next;
4433 PREV_INSN (next) = insn;
4434 }
a813c111 4435 next = NEXT_INSN (BB_END (bb));
bcc708fc 4436 if (BB_FOOTER (bb))
9ee634e3 4437 {
bcc708fc 4438 insn = BB_FOOTER (bb);
bc35512f
JH
4439 while (insn)
4440 {
4b4bf941 4441 if (BARRIER_P (insn))
bc35512f
JH
4442 {
4443 if (PREV_INSN (insn))
4444 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4445 else
bcc708fc 4446 BB_FOOTER (bb) = NEXT_INSN (insn);
bc35512f
JH
4447 if (NEXT_INSN (insn))
4448 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4449 }
4b4bf941 4450 if (LABEL_P (insn))
bc35512f
JH
4451 break;
4452 insn = NEXT_INSN (insn);
4453 }
bcc708fc 4454 if (BB_FOOTER (bb))
bc35512f 4455 {
a813c111 4456 insn = BB_END (bb);
bcc708fc
MM
4457 NEXT_INSN (insn) = BB_FOOTER (bb);
4458 PREV_INSN (BB_FOOTER (bb)) = insn;
bc35512f
JH
4459 while (NEXT_INSN (insn))
4460 insn = NEXT_INSN (insn);
4461 NEXT_INSN (insn) = next;
4462 if (next)
4463 PREV_INSN (next) = insn;
4464 else
4465 set_last_insn (insn);
4466 }
9ee634e3 4467 }
fefa31b5 4468 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
bcc708fc 4469 to = &BB_HEADER (bb->next_bb);
9ee634e3
JH
4470 else
4471 to = &cfg_layout_function_footer;
997de8ed 4472
9ee634e3
JH
4473 rtl_delete_block (bb);
4474
4475 if (prev)
4476 prev = NEXT_INSN (prev);
d329e058 4477 else
9ee634e3
JH
4478 prev = get_insns ();
4479 if (next)
4480 next = PREV_INSN (next);
d329e058 4481 else
9ee634e3
JH
4482 next = get_last_insn ();
4483
4484 if (next && NEXT_INSN (next) != prev)
4485 {
4486 remaints = unlink_insn_chain (prev, next);
4487 insn = remaints;
4488 while (NEXT_INSN (insn))
4489 insn = NEXT_INSN (insn);
4490 NEXT_INSN (insn) = *to;
4491 if (*to)
4492 PREV_INSN (*to) = insn;
4493 *to = remaints;
4494 }
4495}
4496
beb235f8 4497/* Return true when blocks A and B can be safely merged. */
b48d0358 4498
bc35512f 4499static bool
b48d0358 4500cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4501{
750054a2
CT
4502 /* If we are partitioning hot/cold basic blocks, we don't want to
4503 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4504 and cold sections.
4505
8e8d5162 4506 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4507 be optimizable (or blocks that appear to be mergeable), but which really
4508 must be left untouched (they are required to make it safely across
4509 partition boundaries). See the comments at the top of
8e8d5162
CT
4510 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4511
87c8b4be 4512 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4513 return false;
750054a2 4514
7d776ee2
RG
4515 /* Protect the loop latches. */
4516 if (current_loops && b->loop_father->latch == b)
4517 return false;
4518
894a84b5
BS
4519 /* If we would end up moving B's instructions, make sure it doesn't fall
4520 through into the exit block, since we cannot recover from a fallthrough
4521 edge into the exit block occurring in the middle of a function. */
4522 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4523 {
4524 edge e = find_fallthru_edge (b->succs);
fefa31b5 4525 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4526 return false;
4527 }
4528
bc35512f 4529 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4530 return (single_succ_p (a)
4531 && single_succ (a) == b
4532 && single_pred_p (b) == 1
628f6a4e 4533 && a != b
bc35512f 4534 /* Must be simple edge. */
c5cbcccf 4535 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4536 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4537 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4538 /* If the jump insn has side effects, we can't kill the edge.
4539 When not optimizing, try_redirect_by_replacing_jump will
4540 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4541 && (!JUMP_P (BB_END (a))
49ea3702 4542 || ((!optimize || reload_completed)
a813c111 4543 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4544}
4545
41806d92
NS
4546/* Merge block A and B. The blocks must be mergeable. */
4547
bc35512f
JH
4548static void
4549cfg_layout_merge_blocks (basic_block a, basic_block b)
4550{
50a36e42 4551 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
f9df6f16 4552 rtx insn;
50a36e42 4553
77a74ed7 4554 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
bc35512f 4555
6fb5fa3c 4556 if (dump_file)
50a36e42
EB
4557 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4558 a->index);
6fb5fa3c 4559
bc35512f 4560 /* If there was a CODE_LABEL beginning B, delete it. */
4b4bf941 4561 if (LABEL_P (BB_HEAD (b)))
2c97f8e4 4562 {
2c97f8e4
RH
4563 delete_insn (BB_HEAD (b));
4564 }
bc35512f
JH
4565
4566 /* We should have fallthru edge in a, or we can do dummy redirection to get
4567 it cleaned up. */
4b4bf941 4568 if (JUMP_P (BB_END (a)))
628f6a4e 4569 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
341c100f 4570 gcc_assert (!JUMP_P (BB_END (a)));
bc35512f 4571
9be94227 4572 /* When not optimizing CFG and the edge is the only place in RTL which holds
7241571e 4573 some unique locus, emit a nop with that locus in between. */
9be94227
EB
4574 if (!optimize)
4575 emit_nop_for_unique_locus_between (a, b);
7241571e 4576
4c8af858
SB
4577 /* Move things from b->footer after a->footer. */
4578 if (BB_FOOTER (b))
bc35512f 4579 {
4c8af858
SB
4580 if (!BB_FOOTER (a))
4581 BB_FOOTER (a) = BB_FOOTER (b);
4582 else
4583 {
4584 rtx last = BB_FOOTER (a);
4585
4586 while (NEXT_INSN (last))
4587 last = NEXT_INSN (last);
4588 NEXT_INSN (last) = BB_FOOTER (b);
4589 PREV_INSN (BB_FOOTER (b)) = last;
4590 }
4591 BB_FOOTER (b) = NULL;
4592 }
4593
4594 /* Move things from b->header before a->footer.
4595 Note that this may include dead tablejump data, but we don't clean
4596 those up until we go out of cfglayout mode. */
4597 if (BB_HEADER (b))
4598 {
4599 if (! BB_FOOTER (a))
4600 BB_FOOTER (a) = BB_HEADER (b);
4601 else
4602 {
4603 rtx last = BB_HEADER (b);
4604
4605 while (NEXT_INSN (last))
4606 last = NEXT_INSN (last);
4607 NEXT_INSN (last) = BB_FOOTER (a);
4608 PREV_INSN (BB_FOOTER (a)) = last;
4609 BB_FOOTER (a) = BB_HEADER (b);
4610 }
bcc708fc 4611 BB_HEADER (b) = NULL;
bc35512f
JH
4612 }
4613
4614 /* In the case basic blocks are not adjacent, move them around. */
a813c111 4615 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
bc35512f 4616 {
f9df6f16 4617 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
6c3d0e31 4618
f9df6f16 4619 emit_insn_after_noloc (insn, BB_END (a), a);
bc35512f
JH
4620 }
4621 /* Otherwise just re-associate the instructions. */
4622 else
4623 {
a813c111 4624 insn = BB_HEAD (b);
a813c111 4625 BB_END (a) = BB_END (b);
bc35512f
JH
4626 }
4627
f9df6f16
JJ
4628 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4629 We need to explicitly call. */
4630 update_bb_for_insn_chain (insn, BB_END (b), a);
4631
4632 /* Skip possible DELETED_LABEL insn. */
4633 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4634 insn = NEXT_INSN (insn);
4635 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4c8af858 4636 BB_HEAD (b) = BB_END (b) = NULL;
f9df6f16
JJ
4637 delete_insn (insn);
4638
6fb5fa3c
DB
4639 df_bb_delete (b->index);
4640
50a36e42 4641 /* If B was a forwarder block, propagate the locus on the edge. */
2f13f2de 4642 if (forwarder_p
fbc68f2a 4643 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
50a36e42
EB
4644 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4645
c263766c 4646 if (dump_file)
50a36e42 4647 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
bc35512f
JH
4648}
4649
4650/* Split edge E. */
f470c378 4651
bc35512f
JH
4652static basic_block
4653cfg_layout_split_edge (edge e)
4654{
bc35512f 4655 basic_block new_bb =
fefa31b5 4656 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4657 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4658 NULL_RTX, e->src);
4659
fefa31b5 4660 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4661 BB_COPY_PARTITION (new_bb, e->src);
4662 else
4663 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4664 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4665 redirect_edge_and_branch_force (e, new_bb);
4666
4667 return new_bb;
4668}
4669
f470c378
ZD
4670/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4671
4672static void
4673rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4674{
4675}
4676
df92c640
SB
4677/* Return true if BB contains only labels or non-executable
4678 instructions. */
4679
4680static bool
4681rtl_block_empty_p (basic_block bb)
4682{
4683 rtx insn;
4684
fefa31b5
DM
4685 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4686 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4687 return true;
4688
4689 FOR_BB_INSNS (bb, insn)
4690 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4691 return false;
4692
4693 return true;
4694}
4695
4696/* Split a basic block if it ends with a conditional branch and if
4697 the other part of the block is not empty. */
4698
4699static basic_block
4700rtl_split_block_before_cond_jump (basic_block bb)
4701{
4702 rtx insn;
4703 rtx split_point = NULL;
4704 rtx last = NULL;
4705 bool found_code = false;
4706
4707 FOR_BB_INSNS (bb, insn)
4708 {
4709 if (any_condjump_p (insn))
4710 split_point = last;
4711 else if (NONDEBUG_INSN_P (insn))
4712 found_code = true;
4713 last = insn;
4714 }
4715
4716 /* Did not find everything. */
4717 if (found_code && split_point)
4718 return split_block (bb, split_point)->dest;
4719 else
4720 return NULL;
4721}
4722
6de9cd9a
DN
4723/* Return 1 if BB ends with a call, possibly followed by some
4724 instructions that must stay with the call, 0 otherwise. */
4725
4726static bool
b48d0358 4727rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a
DN
4728{
4729 rtx insn = BB_END (bb);
4730
4b4bf941 4731 while (!CALL_P (insn)
6de9cd9a 4732 && insn != BB_HEAD (bb)
92ddef69 4733 && (keep_with_call_p (insn)
b5b8b0ac
AO
4734 || NOTE_P (insn)
4735 || DEBUG_INSN_P (insn)))
6de9cd9a 4736 insn = PREV_INSN (insn);
4b4bf941 4737 return (CALL_P (insn));
6de9cd9a
DN
4738}
4739
4740/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4741
4742static bool
9678086d 4743rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4744{
4745 return any_condjump_p (BB_END (bb));
4746}
4747
4748/* Return true if we need to add fake edge to exit.
4749 Helper function for rtl_flow_call_edges_add. */
4750
4751static bool
9678086d 4752need_fake_edge_p (const_rtx insn)
6de9cd9a
DN
4753{
4754 if (!INSN_P (insn))
4755 return false;
4756
4b4bf941 4757 if ((CALL_P (insn)
6de9cd9a
DN
4758 && !SIBLING_CALL_P (insn)
4759 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4760 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4761 return true;
4762
4763 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4764 && MEM_VOLATILE_P (PATTERN (insn)))
4765 || (GET_CODE (PATTERN (insn)) == PARALLEL
4766 && asm_noperands (insn) != -1
4767 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4768 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4769}
4770
4771/* Add fake edges to the function exit for any non constant and non noreturn
4772 calls, volatile inline assembly in the bitmap of blocks specified by
4773 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4774 that were split.
4775
4776 The goal is to expose cases in which entering a basic block does not imply
4777 that all subsequent instructions must be executed. */
4778
4779static int
4780rtl_flow_call_edges_add (sbitmap blocks)
4781{
4782 int i;
4783 int blocks_split = 0;
8b1c6fd7 4784 int last_bb = last_basic_block_for_fn (cfun);
6de9cd9a
DN
4785 bool check_last_block = false;
4786
0cae8d31 4787 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
6de9cd9a
DN
4788 return 0;
4789
4790 if (! blocks)
4791 check_last_block = true;
4792 else
fefa31b5
DM
4793 check_last_block = bitmap_bit_p (blocks,
4794 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
6de9cd9a
DN
4795
4796 /* In the last basic block, before epilogue generation, there will be
4797 a fallthru edge to EXIT. Special care is required if the last insn
4798 of the last basic block is a call because make_edge folds duplicate
4799 edges, which would result in the fallthru edge also being marked
4800 fake, which would result in the fallthru edge being removed by
4801 remove_fake_edges, which would result in an invalid CFG.
4802
4803 Moreover, we can't elide the outgoing fake edge, since the block
4804 profiler needs to take this into account in order to solve the minimal
4805 spanning tree in the case that the call doesn't return.
4806
4807 Handle this by adding a dummy instruction in a new last basic block. */
4808 if (check_last_block)
4809 {
fefa31b5 4810 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6de9cd9a
DN
4811 rtx insn = BB_END (bb);
4812
4813 /* Back up past insns that must be kept in the same block as a call. */
4814 while (insn != BB_HEAD (bb)
4815 && keep_with_call_p (insn))
4816 insn = PREV_INSN (insn);
4817
4818 if (need_fake_edge_p (insn))
4819 {
4820 edge e;
4821
fefa31b5 4822 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
9ff3d2de
JL
4823 if (e)
4824 {
c41c1387 4825 insert_insn_on_edge (gen_use (const0_rtx), e);
9ff3d2de
JL
4826 commit_edge_insertions ();
4827 }
6de9cd9a
DN
4828 }
4829 }
4830
4831 /* Now add fake edges to the function exit for any non constant
4832 calls since there is no way that we can determine if they will
4833 return or not... */
4834
24bd1a0b 4835 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
6de9cd9a 4836 {
06e28de2 4837 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
6de9cd9a
DN
4838 rtx insn;
4839 rtx prev_insn;
4840
4841 if (!bb)
4842 continue;
4843
d7c028c0 4844 if (blocks && !bitmap_bit_p (blocks, i))
6de9cd9a
DN
4845 continue;
4846
4847 for (insn = BB_END (bb); ; insn = prev_insn)
4848 {
4849 prev_insn = PREV_INSN (insn);
4850 if (need_fake_edge_p (insn))
4851 {
4852 edge e;
4853 rtx split_at_insn = insn;
4854
4855 /* Don't split the block between a call and an insn that should
c22cacf3 4856 remain in the same block as the call. */
4b4bf941 4857 if (CALL_P (insn))
6de9cd9a
DN
4858 while (split_at_insn != BB_END (bb)
4859 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4860 split_at_insn = NEXT_INSN (split_at_insn);
4861
4862 /* The handling above of the final block before the epilogue
c22cacf3 4863 should be enough to verify that there is no edge to the exit
6de9cd9a
DN
4864 block in CFG already. Calling make_edge in such case would
4865 cause us to mark that edge as fake and remove it later. */
4866
4867#ifdef ENABLE_CHECKING
4868 if (split_at_insn == BB_END (bb))
628f6a4e 4869 {
fefa31b5 4870 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
9ff3d2de 4871 gcc_assert (e == NULL);
628f6a4e 4872 }
6de9cd9a
DN
4873#endif
4874
4875 /* Note that the following may create a new basic block
4876 and renumber the existing basic blocks. */
4877 if (split_at_insn != BB_END (bb))
4878 {
4879 e = split_block (bb, split_at_insn);
4880 if (e)
4881 blocks_split++;
4882 }
4883
fefa31b5 4884 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
6de9cd9a
DN
4885 }
4886
4887 if (insn == BB_HEAD (bb))
4888 break;
4889 }
4890 }
4891
4892 if (blocks_split)
4893 verify_flow_info ();
4894
4895 return blocks_split;
4896}
4897
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target, SECOND_HEAD should be the fall-thru
   block; there is no need to handle the fall-thru edge here, the loop
   versioning code handles it.  The reason SECOND_HEAD is a parameter at
   all is that the tree-level lv_add_condition_to_bb hook needs it, and
   both implementations must share one signature since this is a hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  /* COMP_RTX is a comparison rtx; its two operands are the values to
     compare and its code is the comparison operator.  */
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;


  label = block_label (first_head);
  /* A VOIDmode op0 (e.g. a CONST_INT) carries no mode information;
     fall back to op1's mode for the comparison.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump as a detached insn sequence, then splice
     it in after the end of COND_BB.  force_operand legitimizes the two
     operands so they are valid inside a compare.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  /* The jump emitted above is the last insn of the sequence; wire up its
     JUMP_LABEL and account for the new label use by hand, since we are
     emitting outside the normal jump-generation paths.  */
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new condition at the end of the new head block.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
4934
4935
4936/* Given a block B with unconditional branch at its end, get the
4937 store the return the branch edge and the fall-thru edge in
4938 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4939static void
4940rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4941 edge *fallthru_edge)
4942{
4943 edge e = EDGE_SUCC (b, 0);
4944
4945 if (e->flags & EDGE_FALLTHRU)
4946 {
4947 *fallthru_edge = e;
4948 *branch_edge = EDGE_SUCC (b, 1);
4949 }
4950 else
4951 {
4952 *branch_edge = e;
4953 *fallthru_edge = EDGE_SUCC (b, 1);
4954 }
4955}
4956
5e2d947c
JH
4957void
4958init_rtl_bb_info (basic_block bb)
4959{
bcc708fc
MM
4960 gcc_assert (!bb->il.x.rtl);
4961 bb->il.x.head_ = NULL;
4962 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
5e2d947c
JH
4963}
4964
14fa2cc0
ZD
4965/* Returns true if it is possible to remove edge E by redirecting
4966 it to the destination of the other edge from E->src. */
4967
4968static bool
9678086d 4969rtl_can_remove_branch_p (const_edge e)
14fa2cc0 4970{
9678086d
KG
4971 const_basic_block src = e->src;
4972 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4973 const_rtx insn = BB_END (src), set;
14fa2cc0
ZD
4974
4975 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 4976 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
4977 return false;
4978
4979 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4980 return false;
4981
3371a64f 4982 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
4983 return false;
4984
4985 if (!onlyjump_p (insn)
4986 || tablejump_p (insn, NULL, NULL))
4987 return false;
4988
4989 set = single_set (insn);
4990 if (!set || side_effects_p (set))
4991 return false;
4992
4993 return true;
4994}
4995
ffe14686
AM
4996static basic_block
4997rtl_duplicate_bb (basic_block bb)
4998{
4999 bb = cfg_layout_duplicate_bb (bb);
5000 bb->aux = NULL;
5001 return bb;
5002}
5003
aa4723d7
SB
5004/* Do book-keeping of basic block BB for the profile consistency checker.
5005 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5006 then do post-pass accounting. Store the counting in RECORD. */
5007static void
5008rtl_account_profile_record (basic_block bb, int after_pass,
5009 struct profile_record *record)
5010{
5011 rtx insn;
5012 FOR_BB_INSNS (bb, insn)
5013 if (INSN_P (insn))
5014 {
5015 record->size[after_pass]
5016 += insn_rtx_cost (PATTERN (insn), false);
0a6a6ac9 5017 if (profile_status_for_fn (cfun) == PROFILE_READ)
aa4723d7
SB
5018 record->time[after_pass]
5019 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
0a6a6ac9 5020 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
aa4723d7
SB
5021 record->time[after_pass]
5022 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5023 }
5024}
5025
9ee634e3
JH
/* Implementation of CFG manipulation for linearized RTL.
   NOTE(review): this is a positional initializer, so the slot order must
   match the declaration of struct cfg_hooks — keep it in sync with the
   cfg_layout_rtl_cfg_hooks table below.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
5063
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in future
   versions of the compiler.
   NOTE(review): positional initializer — slot order must match the
   struct cfg_hooks declaration and the rtl_cfg_hooks table above.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
78bde837
SB
5105
5106#include "gt-cfgrtl.h"