]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
[AArch64] Fix PR81800
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
ca6c03ca 19
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6 42#include "coretypes.h"
c7131fb2 43#include "backend.h"
957060b5 44#include "target.h"
c7131fb2 45#include "rtl.h"
957060b5
AM
46#include "tree.h"
47#include "cfghooks.h"
c7131fb2 48#include "df.h"
957060b5 49#include "insn-config.h"
4d0cdd0c 50#include "memmodel.h"
957060b5 51#include "emit-rtl.h"
60393bbc
AM
52#include "cfgrtl.h"
53#include "cfganal.h"
54#include "cfgbuild.h"
55#include "cfgcleanup.h"
60393bbc 56#include "bb-reorder.h"
0cbd9993 57#include "rtl-error.h"
2a34bece 58#include "insn-attr.h"
36566b39 59#include "dojump.h"
ff25ef99 60#include "expr.h"
1cb7dfc3 61#include "cfgloop.h"
ef330312 62#include "tree-pass.h"
013a8899 63#include "print-rtl.h"
ca6c03ca 64
0ecf545c
MS
65/* Disable warnings about missing quoting in GCC diagnostics. */
66#if __GNUC__ >= 10
67# pragma GCC diagnostic push
68# pragma GCC diagnostic ignored "-Wformat-diag"
69#endif
70
78bde837
SB
71/* Holds the interesting leading and trailing notes for the function.
72 Only applicable if the CFG is in cfglayout mode. */
1130d5e3 73static GTY(()) rtx_insn *cfg_layout_function_footer;
9152e0aa 74static GTY(()) rtx_insn *cfg_layout_function_header;
78bde837 75
8879d71c 76static rtx_insn *skip_insns_after_block (basic_block);
78bde837 77static void record_effective_endpoints (void);
78bde837
SB
78static void fixup_reorder_chain (void);
79
80void verify_insn_chain (void);
81static void fixup_fallthru_exit_predecessor (void);
8879d71c
DM
82static int can_delete_note_p (const rtx_note *);
83static int can_delete_label_p (const rtx_code_label *);
d329e058 84static basic_block rtl_split_edge (edge);
f470c378 85static bool rtl_move_block_after (basic_block, basic_block);
d329e058 86static int rtl_verify_flow_info (void);
f470c378 87static basic_block cfg_layout_split_block (basic_block, void *);
6de9cd9a 88static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
d329e058
AJ
89static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
90static void cfg_layout_delete_block (basic_block);
91static void rtl_delete_block (basic_block);
92static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
6de9cd9a 93static edge rtl_redirect_edge_and_branch (edge, basic_block);
f470c378 94static basic_block rtl_split_block (basic_block, void *);
1a817418 95static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
d329e058 96static int rtl_verify_flow_info_1 (void);
f470c378 97static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
98\f
99/* Return true if NOTE is not one of the ones that must be kept paired,
5f0d2358 100 so that we may simply delete it. */
ca6c03ca
JH
101
102static int
8879d71c 103can_delete_note_p (const rtx_note *note)
ca6c03ca 104{
cd9c1ca8
RH
105 switch (NOTE_KIND (note))
106 {
107 case NOTE_INSN_DELETED:
108 case NOTE_INSN_BASIC_BLOCK:
109 case NOTE_INSN_EPILOGUE_BEG:
110 return true;
111
112 default:
113 return false;
114 }
ca6c03ca
JH
115}
116
117/* True if a given label can be deleted. */
118
119static int
8879d71c 120can_delete_label_p (const rtx_code_label *label)
ca6c03ca 121{
5f0d2358
RK
122 return (!LABEL_PRESERVE_P (label)
123 /* User declared labels must be preserved. */
124 && LABEL_NAME (label) == 0
6f7eba34
TS
125 && !vec_safe_contains<rtx_insn *> (forced_labels,
126 const_cast<rtx_code_label *> (label)));
ca6c03ca
JH
127}
128
03fbe718 129/* Delete INSN by patching it out. */
ca6c03ca 130
03fbe718 131void
a29f6a2b 132delete_insn (rtx_insn *insn)
ca6c03ca 133{
ca6c03ca
JH
134 rtx note;
135 bool really_delete = true;
136
4b4bf941 137 if (LABEL_P (insn))
ca6c03ca
JH
138 {
139 /* Some labels can't be directly removed from the INSN chain, as they
c22cacf3
MS
140 might be references via variables, constant pool etc.
141 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
8879d71c 142 if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
ca6c03ca
JH
143 {
144 const char *name = LABEL_NAME (insn);
5e5df392 145 basic_block bb = BLOCK_FOR_INSN (insn);
8879d71c 146 rtx_insn *bb_note = NEXT_INSN (insn);
ca6c03ca
JH
147
148 really_delete = false;
149 PUT_CODE (insn, NOTE);
a38e7aa5 150 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
6773e15f 151 NOTE_DELETED_LABEL_NAME (insn) = name;
03fbe718 152
0f33baa9 153 /* If the note following the label starts a basic block, and the
5e5df392 154 label is a member of the same basic block, interchange the two. */
0f33baa9
TV
155 if (bb_note != NULL_RTX
156 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
5e5df392
TV
157 && bb != NULL
158 && bb == BLOCK_FOR_INSN (bb_note))
03fbe718
TV
159 {
160 reorder_insns_nobb (insn, insn, bb_note);
1130d5e3 161 BB_HEAD (bb) = bb_note;
03fbe718 162 if (BB_END (bb) == bb_note)
1130d5e3 163 BB_END (bb) = insn;
03fbe718 164 }
ca6c03ca 165 }
5f0d2358 166
b5241a5a 167 remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
ca6c03ca
JH
168 }
169
170 if (really_delete)
171 {
cda94cbb 172 /* If this insn has already been deleted, something is very wrong. */
4654c0cf 173 gcc_assert (!insn->deleted ());
1f397f45
SB
174 if (INSN_P (insn))
175 df_insn_delete (insn);
ca6c03ca 176 remove_insn (insn);
4654c0cf 177 insn->set_deleted ();
ca6c03ca
JH
178 }
179
180 /* If deleting a jump, decrement the use count of the label. Deleting
181 the label itself should happen in the normal course of block merging. */
cf7c4aa6 182 if (JUMP_P (insn))
9295a326 183 {
cf7c4aa6
HPN
184 if (JUMP_LABEL (insn)
185 && LABEL_P (JUMP_LABEL (insn)))
186 LABEL_NUSES (JUMP_LABEL (insn))--;
187
188 /* If there are more targets, remove them too. */
189 while ((note
190 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
4b4bf941 191 && LABEL_P (XEXP (note, 0)))
9295a326
JZ
192 {
193 LABEL_NUSES (XEXP (note, 0))--;
194 remove_note (insn, note);
195 }
196 }
ca6c03ca 197
cf7c4aa6
HPN
198 /* Also if deleting any insn that references a label as an operand. */
199 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
200 && LABEL_P (XEXP (note, 0)))
201 {
202 LABEL_NUSES (XEXP (note, 0))--;
203 remove_note (insn, note);
204 }
205
95c43227 206 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
ca6c03ca 207 {
95c43227
DM
208 rtvec vec = table->get_labels ();
209 int len = GET_NUM_ELEM (vec);
ca6c03ca
JH
210 int i;
211
212 for (i = 0; i < len; i++)
a124fcda 213 {
95c43227 214 rtx label = XEXP (RTVEC_ELT (vec, i), 0);
a124fcda
RH
215
216 /* When deleting code in bulk (e.g. removing many unreachable
217 blocks) we can delete a label that's a target of the vector
218 before deleting the vector itself. */
4b4bf941 219 if (!NOTE_P (label))
a124fcda
RH
220 LABEL_NUSES (label)--;
221 }
ca6c03ca 222 }
ca6c03ca
JH
223}
224
98ccd1d7
IE
225/* Like delete_insn but also purge dead edges from BB.
226 Return true if any edges are eliminated. */
9ed7b221 227
98ccd1d7 228bool
373fc786 229delete_insn_and_edges (rtx_insn *insn)
3dec4024 230{
3dec4024
JH
231 bool purge = false;
232
ba4f7968 233 if (INSN_P (insn)
3dec4024 234 && BLOCK_FOR_INSN (insn)
a813c111 235 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024 236 purge = true;
03fbe718 237 delete_insn (insn);
3dec4024 238 if (purge)
98ccd1d7
IE
239 return purge_dead_edges (BLOCK_FOR_INSN (insn));
240 return false;
3dec4024
JH
241}
242
ca6c03ca 243/* Unlink a chain of insns between START and FINISH, leaving notes
a7b87f73
ZD
244 that must be paired. If CLEAR_BB is true, we set bb field for
245 insns that cannot be removed to NULL. */
ca6c03ca
JH
246
247void
2d93cd20 248delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
ca6c03ca 249{
5f0d2358
RK
250 /* Unchain the insns one by one. It would be quicker to delete all of these
251 with a single unchaining, rather than one at a time, but we need to keep
252 the NOTE's. */
2d93cd20 253 rtx_insn *current = finish;
ca6c03ca
JH
254 while (1)
255 {
2d93cd20 256 rtx_insn *prev = PREV_INSN (current);
8879d71c 257 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
ca6c03ca
JH
258 ;
259 else
03fbe718 260 delete_insn (current);
ca6c03ca 261
4654c0cf 262 if (clear_bb && !current->deleted ())
03fbe718 263 set_block_for_insn (current, NULL);
a7b87f73 264
03fbe718 265 if (current == start)
ca6c03ca 266 break;
03fbe718 267 current = prev;
ca6c03ca
JH
268 }
269}
270\f
5f0d2358
RK
271/* Create a new basic block consisting of the instructions between HEAD and END
272 inclusive. This function is designed to allow fast BB construction - reuses
273 the note and basic block struct in BB_NOTE, if any and do not grow
274 BASIC_BLOCK chain and should be used directly only by CFG construction code.
275 END can be NULL in to create new empty basic block before HEAD. Both END
918ed612
ZD
276 and HEAD can be NULL to create basic block at the end of INSN chain.
277 AFTER is the basic block we should be put after. */
ca6c03ca
JH
278
279basic_block
e00022e9 280create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
66e8df53 281 basic_block after)
ca6c03ca
JH
282{
283 basic_block bb;
284
285 if (bb_note
ca6c03ca
JH
286 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
287 && bb->aux == NULL)
288 {
289 /* If we found an existing note, thread it back onto the chain. */
290
fee3e72c 291 rtx_insn *after;
ca6c03ca 292
4b4bf941 293 if (LABEL_P (head))
ca6c03ca
JH
294 after = head;
295 else
296 {
297 after = PREV_INSN (head);
298 head = bb_note;
299 }
300
301 if (after != bb_note && NEXT_INSN (after) != bb_note)
ba4f7968 302 reorder_insns_nobb (bb_note, bb_note, after);
ca6c03ca
JH
303 }
304 else
305 {
306 /* Otherwise we must create a note and a basic block structure. */
307
308 bb = alloc_block ();
309
5e2d947c 310 init_rtl_bb_info (bb);
ca6c03ca 311 if (!head && !end)
5f0d2358
RK
312 head = end = bb_note
313 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
4b4bf941 314 else if (LABEL_P (head) && end)
ca6c03ca
JH
315 {
316 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
317 if (head == end)
318 end = bb_note;
319 }
320 else
321 {
322 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
323 head = bb_note;
324 if (!end)
325 end = head;
326 }
5f0d2358 327
ca6c03ca
JH
328 NOTE_BASIC_BLOCK (bb_note) = bb;
329 }
330
331 /* Always include the bb note in the block. */
332 if (NEXT_INSN (end) == bb_note)
333 end = bb_note;
334
1130d5e3
DM
335 BB_HEAD (bb) = head;
336 BB_END (bb) = end;
8b1c6fd7 337 bb->index = last_basic_block_for_fn (cfun)++;
5e2d947c 338 bb->flags = BB_NEW | BB_RTL;
918ed612 339 link_block (bb, after);
557c4b49 340 SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
6fb5fa3c 341 df_bb_refs_record (bb->index, false);
ba4f7968 342 update_bb_for_insn (bb);
076c7ab8 343 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
ca6c03ca
JH
344
345 /* Tag the block so that we know it has been used when considering
346 other basic block notes. */
347 bb->aux = bb;
348
349 return bb;
350}
351
5f0d2358 352/* Create new basic block consisting of instructions in between HEAD and END
ffe14686
AM
353 and place it to the BB chain after block AFTER. END can be NULL to
354 create a new empty basic block before HEAD. Both END and HEAD can be
355 NULL to create basic block at the end of INSN chain. */
ca6c03ca 356
bc35512f
JH
357static basic_block
358rtl_create_basic_block (void *headp, void *endp, basic_block after)
ca6c03ca 359{
e00022e9
DM
360 rtx_insn *head = (rtx_insn *) headp;
361 rtx_insn *end = (rtx_insn *) endp;
ca6c03ca 362 basic_block bb;
0b17ab2f 363
7eca0767 364 /* Grow the basic block array if needed. */
8b1c6fd7
DM
365 if ((size_t) last_basic_block_for_fn (cfun)
366 >= basic_block_info_for_fn (cfun)->length ())
7eca0767 367 {
8b1c6fd7
DM
368 size_t new_size =
369 (last_basic_block_for_fn (cfun)
370 + (last_basic_block_for_fn (cfun) + 3) / 4);
c5f05f68 371 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
7eca0767 372 }
0b17ab2f 373
0cae8d31 374 n_basic_blocks_for_fn (cfun)++;
ca6c03ca 375
852c6ec7 376 bb = create_basic_block_structure (head, end, NULL, after);
ca6c03ca
JH
377 bb->aux = NULL;
378 return bb;
379}
bc35512f
JH
380
381static basic_block
382cfg_layout_create_basic_block (void *head, void *end, basic_block after)
383{
384 basic_block newbb = rtl_create_basic_block (head, end, after);
385
bc35512f
JH
386 return newbb;
387}
ca6c03ca
JH
388\f
389/* Delete the insns in a (non-live) block. We physically delete every
390 non-deleted-note insn, and update the flow graph appropriately.
391
392 Return nonzero if we deleted an exception handler. */
393
394/* ??? Preserving all such notes strikes me as wrong. It would be nice
395 to post-process the stream to remove empty blocks, loops, ranges, etc. */
396
f0fda11c 397static void
d329e058 398rtl_delete_block (basic_block b)
ca6c03ca 399{
8879d71c 400 rtx_insn *insn, *end;
ca6c03ca
JH
401
402 /* If the head of this block is a CODE_LABEL, then it might be the
f39e46ba
SB
403 label for an exception handler which can't be reached. We need
404 to remove the label from the exception_handler_label list. */
a813c111 405 insn = BB_HEAD (b);
ca6c03ca 406
96370780 407 end = get_last_bb_insn (b);
ca6c03ca
JH
408
409 /* Selectively delete the entire chain. */
1130d5e3 410 BB_HEAD (b) = NULL;
a7b87f73
ZD
411 delete_insn_chain (insn, end, true);
412
6fb5fa3c
DB
413
414 if (dump_file)
415 fprintf (dump_file, "deleting block %d\n", b->index);
416 df_bb_delete (b->index);
ca6c03ca
JH
417}
418\f
852c6ec7 419/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
420
421void
d329e058 422compute_bb_for_insn (void)
ca6c03ca 423{
e0082a72 424 basic_block bb;
ca6c03ca 425
11cd3bed 426 FOR_EACH_BB_FN (bb, cfun)
ca6c03ca 427 {
8879d71c
DM
428 rtx_insn *end = BB_END (bb);
429 rtx_insn *insn;
ca6c03ca 430
a813c111 431 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 432 {
ba4f7968 433 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
434 if (insn == end)
435 break;
ca6c03ca
JH
436 }
437 }
438}
439
440/* Release the basic_block_for_insn array. */
441
c2924966 442unsigned int
d329e058 443free_bb_for_insn (void)
ca6c03ca 444{
8879d71c 445 rtx_insn *insn;
ba4f7968 446 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 447 if (!BARRIER_P (insn))
ba4f7968 448 BLOCK_FOR_INSN (insn) = NULL;
c2924966 449 return 0;
ca6c03ca
JH
450}
451
27a4cd48
DM
452namespace {
453
454const pass_data pass_data_free_cfg =
455{
456 RTL_PASS, /* type */
457 "*free_cfg", /* name */
458 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
459 TV_NONE, /* tv_id */
460 0, /* properties_required */
461 0, /* properties_provided */
462 PROP_cfg, /* properties_destroyed */
463 0, /* todo_flags_start */
464 0, /* todo_flags_finish */
ef330312
PB
465};
466
27a4cd48
DM
467class pass_free_cfg : public rtl_opt_pass
468{
469public:
c3284718
RS
470 pass_free_cfg (gcc::context *ctxt)
471 : rtl_opt_pass (pass_data_free_cfg, ctxt)
27a4cd48
DM
472 {}
473
474 /* opt_pass methods: */
be55bfe6 475 virtual unsigned int execute (function *);
27a4cd48
DM
476
477}; // class pass_free_cfg
478
be55bfe6
TS
479unsigned int
480pass_free_cfg::execute (function *)
481{
be55bfe6
TS
482 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
483 valid at that point so it would be too late to call df_analyze. */
e90bedf5 484 if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
be55bfe6
TS
485 {
486 df_note_add_problem ();
487 df_analyze ();
488 }
be55bfe6
TS
489
490 if (crtl->has_bb_partition)
491 insert_section_boundary_note ();
492
493 free_bb_for_insn ();
494 return 0;
495}
496
27a4cd48
DM
497} // anon namespace
498
499rtl_opt_pass *
500make_pass_free_cfg (gcc::context *ctxt)
501{
502 return new pass_free_cfg (ctxt);
503}
504
91278841 505/* Return RTX to emit after when we want to emit code on the entry of function. */
8a2e8325 506rtx_insn *
91278841
AP
507entry_of_function (void)
508{
0cae8d31 509 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
fefa31b5 510 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
91278841
AP
511}
512
11b904a1
BS
513/* Emit INSN at the entry point of the function, ensuring that it is only
514 executed once per function. */
515void
516emit_insn_at_entry (rtx insn)
517{
fefa31b5 518 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
11b904a1 519 edge e = ei_safe_edge (ei);
5419bc7f 520 gcc_assert (e->flags & EDGE_FALLTHRU);
11b904a1
BS
521
522 insert_insn_on_edge (insn, e);
523 commit_edge_insertions ();
524}
525
6c3d0e31 526/* Update BLOCK_FOR_INSN of insns between BEGIN and END
b8698a0f 527 (or BARRIER if found) and notify df of the bb change.
6c3d0e31
SP
528 The insn chain range is inclusive
529 (i.e. both BEGIN and END will be updated. */
ca6c03ca 530
6c3d0e31 531static void
8879d71c 532update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
ca6c03ca 533{
8879d71c 534 rtx_insn *insn;
ca6c03ca 535
6c3d0e31
SP
536 end = NEXT_INSN (end);
537 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
63642d5a
AO
538 if (!BARRIER_P (insn))
539 df_insn_change_bb (insn, bb);
ca6c03ca 540}
6c3d0e31
SP
541
542/* Update BLOCK_FOR_INSN of insns in BB to BB,
543 and notify df of the change. */
544
545void
546update_bb_for_insn (basic_block bb)
547{
548 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
549}
550
532aafad 551\f
1008df90 552/* Like active_insn_p, except keep the return value use or clobber around
532aafad
SB
553 even after reload. */
554
555static bool
8879d71c 556flow_active_insn_p (const rtx_insn *insn)
532aafad
SB
557{
558 if (active_insn_p (insn))
559 return true;
560
561 /* A clobber of the function return value exists for buggy
562 programs that fail to return a value. Its effect is to
563 keep the return value from being live across the entire
564 function. If we allow it to be skipped, we introduce the
1008df90
JJ
565 possibility for register lifetime confusion.
566 Similarly, keep a USE of the function return value, otherwise
567 the USE is dropped and we could fail to thread jump if USE
568 appears on some paths and not on others, see PR90257. */
569 if ((GET_CODE (PATTERN (insn)) == CLOBBER
570 || GET_CODE (PATTERN (insn)) == USE)
532aafad
SB
571 && REG_P (XEXP (PATTERN (insn), 0))
572 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
573 return true;
574
575 return false;
576}
577
578/* Return true if the block has no effect and only forwards control flow to
579 its single destination. */
532aafad
SB
580
581bool
2407343c 582contains_no_active_insn_p (const_basic_block bb)
532aafad 583{
8879d71c 584 rtx_insn *insn;
532aafad 585
4833f561
JJ
586 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
587 || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
588 || !single_succ_p (bb)
589 || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
532aafad
SB
590 return false;
591
2407343c
JR
592 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
593 if (INSN_P (insn) && flow_active_insn_p (insn))
594 return false;
595
596 return (!INSN_P (insn)
597 || (JUMP_P (insn) && simplejump_p (insn))
598 || !flow_active_insn_p (insn));
599}
600
601/* Likewise, but protect loop latches, headers and preheaders. */
602/* FIXME: Make this a cfg hook. */
603
604bool
605forwarder_block_p (const_basic_block bb)
606{
607 if (!contains_no_active_insn_p (bb))
608 return false;
609
532aafad
SB
610 /* Protect loop latches, headers and preheaders. */
611 if (current_loops)
612 {
613 basic_block dest;
614 if (bb->loop_father->header == bb)
615 return false;
616 dest = EDGE_SUCC (bb, 0)->dest;
617 if (dest->loop_father->header == dest)
618 return false;
619 }
620
2407343c 621 return true;
532aafad
SB
622}
623
624/* Return nonzero if we can reach target from src by falling through. */
d7b6661b 625/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
532aafad
SB
626
627bool
628can_fallthru (basic_block src, basic_block target)
629{
8879d71c
DM
630 rtx_insn *insn = BB_END (src);
631 rtx_insn *insn2;
532aafad
SB
632 edge e;
633 edge_iterator ei;
634
fefa31b5 635 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
636 return true;
637 if (src->next_bb != target)
d7b6661b
SB
638 return false;
639
640 /* ??? Later we may add code to move jump tables offline. */
641 if (tablejump_p (insn, NULL, NULL))
642 return false;
643
532aafad 644 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 645 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad 646 && e->flags & EDGE_FALLTHRU)
d7b6661b 647 return false;
532aafad
SB
648
649 insn2 = BB_HEAD (target);
d7b6661b 650 if (!active_insn_p (insn2))
532aafad
SB
651 insn2 = next_active_insn (insn2);
652
532aafad
SB
653 return next_active_insn (insn) == insn2;
654}
655
656/* Return nonzero if we could reach target from src by falling through,
657 if the target was made adjacent. If we already have a fall-through
658 edge to the exit block, we can't do that. */
659static bool
660could_fall_through (basic_block src, basic_block target)
661{
662 edge e;
663 edge_iterator ei;
664
fefa31b5 665 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
666 return true;
667 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 668 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad
SB
669 && e->flags & EDGE_FALLTHRU)
670 return 0;
671 return true;
672}
ca6c03ca 673\f
ef2be249 674/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
ed270997 675rtx_note *
ef2be249
RS
676bb_note (basic_block bb)
677{
8879d71c 678 rtx_insn *note;
ef2be249
RS
679
680 note = BB_HEAD (bb);
681 if (LABEL_P (note))
682 note = NEXT_INSN (note);
683
684 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
ed270997 685 return as_a <rtx_note *> (note);
ef2be249
RS
686}
687
6fb5fa3c
DB
688/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
689 note associated with the BLOCK. */
690
8879d71c 691static rtx_insn *
6fb5fa3c
DB
692first_insn_after_basic_block_note (basic_block block)
693{
8879d71c 694 rtx_insn *insn;
6fb5fa3c
DB
695
696 /* Get the first instruction in the block. */
697 insn = BB_HEAD (block);
698
699 if (insn == NULL_RTX)
8879d71c 700 return NULL;
6fb5fa3c
DB
701 if (LABEL_P (insn))
702 insn = NEXT_INSN (insn);
703 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
704
705 return NEXT_INSN (insn);
706}
707
2b9c63a2
FY
708/* Creates a new basic block just after basic block BB by splitting
709 everything after specified instruction INSNP. */
ca6c03ca 710
f470c378 711static basic_block
d329e058 712rtl_split_block (basic_block bb, void *insnp)
ca6c03ca
JH
713{
714 basic_block new_bb;
8879d71c 715 rtx_insn *insn = (rtx_insn *) insnp;
f470c378 716 edge e;
628f6a4e 717 edge_iterator ei;
ca6c03ca 718
f470c378
ZD
719 if (!insn)
720 {
721 insn = first_insn_after_basic_block_note (bb);
722
723 if (insn)
b5b8b0ac 724 {
8879d71c 725 rtx_insn *next = insn;
b5b8b0ac
AO
726
727 insn = PREV_INSN (insn);
728
729 /* If the block contains only debug insns, insn would have
730 been NULL in a non-debug compilation, and then we'd end
731 up emitting a DELETED note. For -fcompare-debug
732 stability, emit the note too. */
733 if (insn != BB_END (bb)
734 && DEBUG_INSN_P (next)
735 && DEBUG_INSN_P (BB_END (bb)))
736 {
737 while (next != BB_END (bb) && DEBUG_INSN_P (next))
738 next = NEXT_INSN (next);
739
740 if (next == BB_END (bb))
741 emit_note_after (NOTE_INSN_DELETED, next);
742 }
743 }
f470c378
ZD
744 else
745 insn = get_last_insn ();
746 }
747
748 /* We probably should check type of the insn so that we do not create
749 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
750 bother. */
751 if (insn == BB_END (bb))
752 emit_note_after (NOTE_INSN_DELETED, insn);
ca6c03ca
JH
753
754 /* Create the new basic block. */
a813c111 755 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
076c7ab8 756 BB_COPY_PARTITION (new_bb, bb);
1130d5e3 757 BB_END (bb) = insn;
ca6c03ca
JH
758
759 /* Redirect the outgoing edges. */
628f6a4e
BE
760 new_bb->succs = bb->succs;
761 bb->succs = NULL;
762 FOR_EACH_EDGE (e, ei, new_bb->succs)
ca6c03ca
JH
763 e->src = new_bb;
764
6fb5fa3c
DB
765 /* The new block starts off being dirty. */
766 df_set_bb_dirty (bb);
f470c378 767 return new_bb;
bc35512f
JH
768}
769
9be94227
EB
770/* Return true if the single edge between blocks A and B is the only place
771 in RTL which holds some unique locus. */
772
773static bool
774unique_locus_on_edge_between_p (basic_block a, basic_block b)
775{
5368224f 776 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
8879d71c 777 rtx_insn *insn, *end;
9be94227 778
2f13f2de 779 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
9be94227
EB
780 return false;
781
782 /* First scan block A backward. */
783 insn = BB_END (a);
784 end = PREV_INSN (BB_HEAD (a));
5368224f 785 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
9be94227
EB
786 insn = PREV_INSN (insn);
787
5368224f 788 if (insn != end && INSN_LOCATION (insn) == goto_locus)
9be94227
EB
789 return false;
790
791 /* Then scan block B forward. */
792 insn = BB_HEAD (b);
793 if (insn)
794 {
795 end = NEXT_INSN (BB_END (b));
796 while (insn != end && !NONDEBUG_INSN_P (insn))
797 insn = NEXT_INSN (insn);
798
5368224f
DC
799 if (insn != end && INSN_HAS_LOCATION (insn)
800 && INSN_LOCATION (insn) == goto_locus)
9be94227
EB
801 return false;
802 }
803
804 return true;
805}
806
807/* If the single edge between blocks A and B is the only place in RTL which
808 holds some unique locus, emit a nop with that locus between the blocks. */
809
810static void
811emit_nop_for_unique_locus_between (basic_block a, basic_block b)
812{
813 if (!unique_locus_on_edge_between_p (a, b))
814 return;
815
1130d5e3 816 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5368224f 817 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
9be94227
EB
818}
819
ca6c03ca 820/* Blocks A and B are to be merged into a single block A. The insns
bc35512f 821 are already contiguous. */
ca6c03ca 822
bc35512f
JH
823static void
824rtl_merge_blocks (basic_block a, basic_block b)
ca6c03ca 825{
7792f13c
EB
826 /* If B is a forwarder block whose outgoing edge has no location, we'll
827 propagate the locus of the edge between A and B onto it. */
828 const bool forward_edge_locus
829 = (b->flags & BB_FORWARDER_BLOCK) != 0
830 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
8879d71c
DM
831 rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
832 rtx_insn *del_first = NULL, *del_last = NULL;
833 rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
ca6c03ca
JH
834 int b_empty = 0;
835
6fb5fa3c 836 if (dump_file)
50a36e42
EB
837 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
838 a->index);
6fb5fa3c 839
b5b8b0ac
AO
840 while (DEBUG_INSN_P (b_end))
841 b_end = PREV_INSN (b_debug_start = b_end);
842
ca6c03ca 843 /* If there was a CODE_LABEL beginning B, delete it. */
4b4bf941 844 if (LABEL_P (b_head))
ca6c03ca
JH
845 {
846 /* Detect basic blocks with nothing but a label. This can happen
847 in particular at the end of a function. */
848 if (b_head == b_end)
849 b_empty = 1;
5f0d2358 850
ca6c03ca
JH
851 del_first = del_last = b_head;
852 b_head = NEXT_INSN (b_head);
853 }
854
5f0d2358
RK
855 /* Delete the basic block note and handle blocks containing just that
856 note. */
ca6c03ca
JH
857 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
858 {
859 if (b_head == b_end)
860 b_empty = 1;
861 if (! del_last)
862 del_first = b_head;
5f0d2358 863
ca6c03ca
JH
864 del_last = b_head;
865 b_head = NEXT_INSN (b_head);
866 }
867
868 /* If there was a jump out of A, delete it. */
4b4bf941 869 if (JUMP_P (a_end))
ca6c03ca 870 {
8879d71c 871 rtx_insn *prev;
ca6c03ca
JH
872
873 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
4b4bf941 874 if (!NOTE_P (prev)
a38e7aa5 875 || NOTE_INSN_BASIC_BLOCK_P (prev)
a813c111 876 || prev == BB_HEAD (a))
ca6c03ca
JH
877 break;
878
879 del_first = a_end;
880
ca6c03ca
JH
881 /* If this was a conditional jump, we need to also delete
882 the insn that set cc0. */
058eb3b0 883 if (HAVE_cc0 && only_sets_cc0_p (prev))
ca6c03ca 884 {
8879d71c 885 rtx_insn *tmp = prev;
5f0d2358 886
ca6c03ca
JH
887 prev = prev_nonnote_insn (prev);
888 if (!prev)
a813c111 889 prev = BB_HEAD (a);
ca6c03ca
JH
890 del_first = tmp;
891 }
ca6c03ca
JH
892
893 a_end = PREV_INSN (del_first);
894 }
4b4bf941 895 else if (BARRIER_P (NEXT_INSN (a_end)))
ca6c03ca
JH
896 del_first = NEXT_INSN (a_end);
897
ca6c03ca
JH
898 /* Delete everything marked above as well as crap that might be
899 hanging out between the two blocks. */
1130d5e3
DM
900 BB_END (a) = a_end;
901 BB_HEAD (b) = b_empty ? NULL : b_head;
a7b87f73 902 delete_insn_chain (del_first, del_last, true);
ca6c03ca 903
7792f13c
EB
904 /* If not optimizing, preserve the locus of the single edge between
905 blocks A and B if necessary by emitting a nop. */
906 if (!optimize
907 && !forward_edge_locus
908 && !DECL_IGNORED_P (current_function_decl))
9be94227
EB
909 {
910 emit_nop_for_unique_locus_between (a, b);
911 a_end = BB_END (a);
912 }
913
ca6c03ca
JH
914 /* Reassociate the insns of B with A. */
915 if (!b_empty)
916 {
b5b8b0ac 917 update_bb_for_insn_chain (a_end, b_debug_end, a);
5f0d2358 918
1130d5e3
DM
919 BB_END (a) = b_debug_end;
920 BB_HEAD (b) = NULL;
b5b8b0ac
AO
921 }
922 else if (b_end != b_debug_end)
923 {
924 /* Move any deleted labels and other notes between the end of A
925 and the debug insns that make up B after the debug insns,
926 bringing the debug insns into A while keeping the notes after
927 the end of A. */
928 if (NEXT_INSN (a_end) != b_debug_start)
929 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
930 b_debug_end);
931 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
1130d5e3 932 BB_END (a) = b_debug_end;
ca6c03ca 933 }
5f0d2358 934
6fb5fa3c 935 df_bb_delete (b->index);
50a36e42 936
7792f13c 937 if (forward_edge_locus)
50a36e42
EB
938 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
939
940 if (dump_file)
941 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
ca6c03ca 942}
bc35512f 943
6fb5fa3c 944
bc35512f 945/* Return true when block A and B can be merged. */
9678086d 946
b48d0358
DN
947static bool
948rtl_can_merge_blocks (basic_block a, basic_block b)
bc35512f 949{
750054a2
CT
950 /* If we are partitioning hot/cold basic blocks, we don't want to
951 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
952 and cold sections.
953
8e8d5162 954 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
955 be optimizable (or blocks that appear to be mergeable), but which really
956 must be left untouched (they are required to make it safely across
957 partition boundaries). See the comments at the top of
8e8d5162 958 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
076c7ab8 959
87c8b4be 960 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 961 return false;
750054a2 962
7d776ee2
RG
963 /* Protect the loop latches. */
964 if (current_loops && b->loop_father->latch == b)
965 return false;
966
bc35512f 967 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
968 return (single_succ_p (a)
969 && single_succ (a) == b
970 && single_pred_p (b)
628f6a4e 971 && a != b
bc35512f 972 /* Must be simple edge. */
c5cbcccf 973 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
bc35512f 974 && a->next_bb == b
fefa31b5
DM
975 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
976 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
977 /* If the jump insn has side effects,
978 we can't kill the edge. */
4b4bf941 979 && (!JUMP_P (BB_END (a))
e24e7211 980 || (reload_completed
a813c111 981 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f 982}
ca6c03ca 983\f
5f0d2358
RK
984/* Return the label in the head of basic block BLOCK. Create one if it doesn't
985 exist. */
ca6c03ca 986
1476d1bd 987rtx_code_label *
d329e058 988block_label (basic_block block)
ca6c03ca 989{
fefa31b5 990 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1476d1bd 991 return NULL;
5f0d2358 992
4b4bf941 993 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 994 {
1130d5e3 995 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 996 }
5f0d2358 997
1476d1bd 998 return as_a <rtx_code_label *> (BB_HEAD (block));
ca6c03ca
JH
999}
1000
91c50487
ML
1001/* Remove all barriers from BB_FOOTER of a BB. */
1002
1003static void
1004remove_barriers_from_footer (basic_block bb)
1005{
1006 rtx_insn *insn = BB_FOOTER (bb);
1007
1008 /* Remove barriers but keep jumptables. */
1009 while (insn)
1010 {
1011 if (BARRIER_P (insn))
1012 {
1013 if (PREV_INSN (insn))
1014 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1015 else
1016 BB_FOOTER (bb) = NEXT_INSN (insn);
1017 if (NEXT_INSN (insn))
1018 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1019 }
1020 if (LABEL_P (insn))
1021 return;
1022 insn = NEXT_INSN (insn);
1023 }
1024}
1025
ca6c03ca 1026/* Attempt to perform edge redirection by replacing possibly complex jump
5f0d2358
RK
1027 instruction by unconditional jump or removing jump completely. This can
1028 apply only if all edges now point to the same block. The parameters and
1029 return values are equivalent to redirect_edge_and_branch. */
ca6c03ca 1030
6de9cd9a 1031edge
bc35512f 1032try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
ca6c03ca
JH
1033{
1034 basic_block src = e->src;
8879d71c 1035 rtx_insn *insn = BB_END (src), *kill_from;
e1233a7d 1036 rtx set;
ca6c03ca 1037 int fallthru = 0;
750054a2
CT
1038
1039 /* If we are partitioning hot/cold basic blocks, we don't want to
1040 mess up unconditional or indirect jumps that cross between hot
8e8d5162
CT
1041 and cold sections.
1042
1043 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
1044 be optimizable (or blocks that appear to be mergeable), but which really
1045 must be left untouched (they are required to make it safely across
1046 partition boundaries). See the comments at the top of
8e8d5162 1047 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
c22cacf3 1048
3371a64f 1049 if (BB_PARTITION (src) != BB_PARTITION (target))
9cf84a3c 1050 return NULL;
750054a2 1051
6a66a8a7
KH
1052 /* We can replace or remove a complex jump only when we have exactly
1053 two edges. Also, if we have exactly one outgoing edge, we can
1054 redirect that. */
1055 if (EDGE_COUNT (src->succs) >= 3
1056 /* Verify that all targets will be TARGET. Specifically, the
1057 edge that is not E must also go to TARGET. */
1058 || (EDGE_COUNT (src->succs) == 2
1059 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1060 return NULL;
5f0d2358 1061
6a66a8a7 1062 if (!onlyjump_p (insn))
6de9cd9a 1063 return NULL;
3348b696 1064 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
6de9cd9a 1065 return NULL;
ca6c03ca
JH
1066
1067 /* Avoid removing branch with side effects. */
1068 set = single_set (insn);
1069 if (!set || side_effects_p (set))
6de9cd9a 1070 return NULL;
ca6c03ca
JH
1071
1072 /* In case we zap a conditional jump, we'll need to kill
1073 the cc0 setter too. */
1074 kill_from = insn;
058eb3b0 1075 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
9caea4a7 1076 && only_sets_cc0_p (PREV_INSN (insn)))
ca6c03ca 1077 kill_from = PREV_INSN (insn);
ca6c03ca
JH
1078
1079 /* See if we can create the fallthru edge. */
bc35512f 1080 if (in_cfglayout || can_fallthru (src, target))
ca6c03ca 1081 {
c263766c
RH
1082 if (dump_file)
1083 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
ca6c03ca
JH
1084 fallthru = 1;
1085
eaec9b3d 1086 /* Selectively unlink whole insn chain. */
bc35512f
JH
1087 if (in_cfglayout)
1088 {
a7b87f73 1089 delete_insn_chain (kill_from, BB_END (src), false);
91c50487 1090 remove_barriers_from_footer (src);
bc35512f
JH
1091 }
1092 else
a7b87f73
ZD
1093 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1094 false);
ca6c03ca 1095 }
5f0d2358 1096
ca6c03ca
JH
1097 /* If this already is simplejump, redirect it. */
1098 else if (simplejump_p (insn))
1099 {
1100 if (e->dest == target)
6de9cd9a 1101 return NULL;
c263766c
RH
1102 if (dump_file)
1103 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
0b17ab2f 1104 INSN_UID (insn), e->dest->index, target->index);
1476d1bd
MM
1105 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1106 block_label (target), 0))
6ee3c8e4 1107 {
fefa31b5 1108 gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
341c100f 1109 return NULL;
6ee3c8e4 1110 }
ca6c03ca 1111 }
5f0d2358 1112
6ee3c8e4 1113 /* Cannot do anything for target exit block. */
fefa31b5 1114 else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
6de9cd9a 1115 return NULL;
6ee3c8e4 1116
ca6c03ca
JH
1117 /* Or replace possibly complicated jump insn by simple jump insn. */
1118 else
1119 {
e67d1102 1120 rtx_code_label *target_label = block_label (target);
8879d71c 1121 rtx_insn *barrier;
dfe08bc4 1122 rtx_insn *label;
8942ee0f 1123 rtx_jump_table_data *table;
ca6c03ca 1124
ec4a505f 1125 emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
a813c111 1126 JUMP_LABEL (BB_END (src)) = target_label;
ca6c03ca 1127 LABEL_NUSES (target_label)++;
c263766c
RH
1128 if (dump_file)
1129 fprintf (dump_file, "Replacing insn %i by jump %i\n",
a813c111 1130 INSN_UID (insn), INSN_UID (BB_END (src)));
ca6c03ca 1131
4da2eb6b 1132
ba4807a0
LB
1133 delete_insn_chain (kill_from, insn, false);
1134
4da2eb6b
RH
1135 /* Recognize a tablejump that we are converting to a
1136 simple jump and remove its associated CODE_LABEL
1137 and ADDR_VEC or ADDR_DIFF_VEC. */
ba4807a0
LB
1138 if (tablejump_p (insn, &label, &table))
1139 delete_insn_chain (label, table, false);
eb5b8ad4 1140
65f4b875 1141 barrier = next_nonnote_nondebug_insn (BB_END (src));
4b4bf941 1142 if (!barrier || !BARRIER_P (barrier))
a813c111 1143 emit_barrier_after (BB_END (src));
5d693491
JZ
1144 else
1145 {
a813c111 1146 if (barrier != NEXT_INSN (BB_END (src)))
5d693491
JZ
1147 {
1148 /* Move the jump before barrier so that the notes
1149 which originally were or were created before jump table are
1150 inside the basic block. */
8879d71c 1151 rtx_insn *new_insn = BB_END (src);
5d693491 1152
6c3d0e31
SP
1153 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1154 PREV_INSN (barrier), src);
5d693491 1155
0f82e5c9
DM
1156 SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1157 SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
5d693491 1158
0f82e5c9
DM
1159 SET_NEXT_INSN (new_insn) = barrier;
1160 SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
5d693491 1161
0f82e5c9
DM
1162 SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1163 SET_PREV_INSN (barrier) = new_insn;
5d693491
JZ
1164 }
1165 }
ca6c03ca
JH
1166 }
1167
1168 /* Keep only one edge out and set proper flags. */
c5cbcccf 1169 if (!single_succ_p (src))
628f6a4e 1170 remove_edge (e);
c5cbcccf 1171 gcc_assert (single_succ_p (src));
628f6a4e 1172
c5cbcccf 1173 e = single_succ_edge (src);
ca6c03ca
JH
1174 if (fallthru)
1175 e->flags = EDGE_FALLTHRU;
1176 else
1177 e->flags = 0;
5f0d2358 1178
357067f2 1179 e->probability = profile_probability::always ();
ca6c03ca 1180
ca6c03ca
JH
1181 if (e->dest != target)
1182 redirect_edge_succ (e, target);
6de9cd9a 1183 return e;
ca6c03ca
JH
1184}
1185
4e3825db
MM
1186/* Subroutine of redirect_branch_edge that tries to patch the jump
1187 instruction INSN so that it reaches block NEW. Do this
1188 only when it originally reached block OLD. Return true if this
1189 worked or the original target wasn't OLD, return false if redirection
1190 doesn't work. */
1191
1192static bool
8879d71c 1193patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
ca6c03ca 1194{
8942ee0f 1195 rtx_jump_table_data *table;
ca6c03ca 1196 rtx tmp;
ca6c03ca 1197 /* Recognize a tablejump and adjust all matching cases. */
8942ee0f 1198 if (tablejump_p (insn, NULL, &table))
ca6c03ca
JH
1199 {
1200 rtvec vec;
1201 int j;
e67d1102 1202 rtx_code_label *new_label = block_label (new_bb);
ca6c03ca 1203
fefa31b5 1204 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4e3825db 1205 return false;
95c43227 1206 vec = table->get_labels ();
ca6c03ca
JH
1207
1208 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1209 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1210 {
1211 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1212 --LABEL_NUSES (old_label);
1213 ++LABEL_NUSES (new_label);
1214 }
1215
f9da5064 1216 /* Handle casesi dispatch insns. */
ca6c03ca
JH
1217 if ((tmp = single_set (insn)) != NULL
1218 && SET_DEST (tmp) == pc_rtx
1219 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1220 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
04a121a7 1221 && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
ca6c03ca 1222 {
4c33cb26 1223 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
ca6c03ca
JH
1224 new_label);
1225 --LABEL_NUSES (old_label);
1226 ++LABEL_NUSES (new_label);
1227 }
1228 }
1c384bf1
RH
1229 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1230 {
1231 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
e67d1102 1232 rtx note;
1c384bf1 1233
fefa31b5 1234 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1c384bf1 1235 return false;
e67d1102 1236 rtx_code_label *new_label = block_label (new_bb);
1c384bf1
RH
1237
1238 for (i = 0; i < n; ++i)
1239 {
1240 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1241 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1242 if (XEXP (old_ref, 0) == old_label)
1243 {
1244 ASM_OPERANDS_LABEL (tmp, i)
1245 = gen_rtx_LABEL_REF (Pmode, new_label);
1246 --LABEL_NUSES (old_label);
1247 ++LABEL_NUSES (new_label);
1248 }
1249 }
1250
1251 if (JUMP_LABEL (insn) == old_label)
1252 {
1253 JUMP_LABEL (insn) = new_label;
1254 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1255 if (note)
1256 remove_note (insn, note);
1257 }
1258 else
1259 {
1260 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1261 if (note)
1262 remove_note (insn, note);
1263 if (JUMP_LABEL (insn) != new_label
1264 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1265 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1266 }
3b5fda81
JJ
1267 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1268 != NULL_RTX)
1269 XEXP (note, 0) = new_label;
1c384bf1 1270 }
ca6c03ca
JH
1271 else
1272 {
1273 /* ?? We may play the games with moving the named labels from
1274 one basic block to the other in case only one computed_jump is
1275 available. */
5f0d2358
RK
1276 if (computed_jump_p (insn)
1277 /* A return instruction can't be redirected. */
1278 || returnjump_p (insn))
4e3825db 1279 return false;
6ee3c8e4 1280
4e3825db 1281 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
6ee3c8e4 1282 {
4e3825db
MM
1283 /* If the insn doesn't go where we think, we're confused. */
1284 gcc_assert (JUMP_LABEL (insn) == old_label);
1285
1286 /* If the substitution doesn't succeed, die. This can happen
1287 if the back end emitted unrecognizable instructions or if
a09ad347
JJ
1288 target is exit block on some arches. Or for crossing
1289 jumps. */
1476d1bd
MM
1290 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1291 block_label (new_bb), 0))
4e3825db 1292 {
a09ad347
JJ
1293 gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1294 || CROSSING_JUMP_P (insn));
4e3825db
MM
1295 return false;
1296 }
6ee3c8e4 1297 }
ca6c03ca 1298 }
4e3825db
MM
1299 return true;
1300}
1301
1302
1303/* Redirect edge representing branch of (un)conditional jump or tablejump,
1304 NULL on failure */
1305static edge
1306redirect_branch_edge (edge e, basic_block target)
1307{
8879d71c 1308 rtx_insn *old_label = BB_HEAD (e->dest);
4e3825db 1309 basic_block src = e->src;
8879d71c 1310 rtx_insn *insn = BB_END (src);
4e3825db
MM
1311
1312 /* We can only redirect non-fallthru edges of jump insn. */
1313 if (e->flags & EDGE_FALLTHRU)
1314 return NULL;
1315 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1316 return NULL;
1317
1318 if (!currently_expanding_to_rtl)
1319 {
1476d1bd 1320 if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
4e3825db
MM
1321 return NULL;
1322 }
1323 else
1324 /* When expanding this BB might actually contain multiple
1325 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1326 Redirect all of those that match our label. */
0f346928 1327 FOR_BB_INSNS (src, insn)
1476d1bd
MM
1328 if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
1329 old_label, target))
4e3825db 1330 return NULL;
ca6c03ca 1331
c263766c
RH
1332 if (dump_file)
1333 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
0b17ab2f 1334 e->src->index, e->dest->index, target->index);
5f0d2358 1335
ca6c03ca 1336 if (e->dest != target)
6de9cd9a 1337 e = redirect_edge_succ_nodup (e, target);
6fb5fa3c 1338
6de9cd9a 1339 return e;
bc35512f
JH
1340}
1341
3371a64f
TJ
1342/* Called when edge E has been redirected to a new destination,
1343 in order to update the region crossing flag on the edge and
1344 jump. */
1345
1346static void
1347fixup_partition_crossing (edge e)
1348{
fefa31b5
DM
1349 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1350 == EXIT_BLOCK_PTR_FOR_FN (cfun))
3371a64f
TJ
1351 return;
1352 /* If we redirected an existing edge, it may already be marked
1353 crossing, even though the new src is missing a reg crossing note.
1354 But make sure reg crossing note doesn't already exist before
1355 inserting. */
1356 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1357 {
1358 e->flags |= EDGE_CROSSING;
52af3804 1359 if (JUMP_P (BB_END (e->src)))
339ba33b 1360 CROSSING_JUMP_P (BB_END (e->src)) = 1;
3371a64f
TJ
1361 }
1362 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1363 {
1364 e->flags &= ~EDGE_CROSSING;
1365 /* Remove the section crossing note from jump at end of
1366 src if it exists, and if no other successors are
1367 still crossing. */
339ba33b 1368 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
3371a64f
TJ
1369 {
1370 bool has_crossing_succ = false;
1371 edge e2;
1372 edge_iterator ei;
1373 FOR_EACH_EDGE (e2, ei, e->src->succs)
1374 {
1375 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1376 if (has_crossing_succ)
1377 break;
1378 }
1379 if (!has_crossing_succ)
339ba33b 1380 CROSSING_JUMP_P (BB_END (e->src)) = 0;
3371a64f
TJ
1381 }
1382 }
1383}
1384
600b5b1d
TJ
1385/* Called when block BB has been reassigned to the cold partition,
1386 because it is now dominated by another cold block,
1387 to ensure that the region crossing attributes are updated. */
1388
1389static void
1390fixup_new_cold_bb (basic_block bb)
1391{
1392 edge e;
1393 edge_iterator ei;
1394
1395 /* This is called when a hot bb is found to now be dominated
1396 by a cold bb and therefore needs to become cold. Therefore,
1397 its preds will no longer be region crossing. Any non-dominating
1398 preds that were previously hot would also have become cold
1399 in the caller for the same region. Any preds that were previously
1400 region-crossing will be adjusted in fixup_partition_crossing. */
1401 FOR_EACH_EDGE (e, ei, bb->preds)
1402 {
1403 fixup_partition_crossing (e);
1404 }
1405
1406 /* Possibly need to make bb's successor edges region crossing,
1407 or remove stale region crossing. */
1408 FOR_EACH_EDGE (e, ei, bb->succs)
1409 {
1410 /* We can't have fall-through edges across partition boundaries.
1411 Note that force_nonfallthru will do any necessary partition
1412 boundary fixup by calling fixup_partition_crossing itself. */
1413 if ((e->flags & EDGE_FALLTHRU)
1414 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
fefa31b5 1415 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
600b5b1d
TJ
1416 force_nonfallthru (e);
1417 else
1418 fixup_partition_crossing (e);
1419 }
1420}
1421
bc35512f
JH
1422/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1423 expense of adding new instructions or reordering basic blocks.
1424
1425 Function can be also called with edge destination equivalent to the TARGET.
1426 Then it should try the simplifications and do nothing if none is possible.
1427
6de9cd9a
DN
1428 Return edge representing the branch if transformation succeeded. Return NULL
1429 on failure.
1430 We still return NULL in case E already destinated TARGET and we didn't
1431 managed to simplify instruction stream. */
bc35512f 1432
6de9cd9a 1433static edge
5671bf27 1434rtl_redirect_edge_and_branch (edge e, basic_block target)
bc35512f 1435{
6de9cd9a 1436 edge ret;
f345f21a 1437 basic_block src = e->src;
3371a64f 1438 basic_block dest = e->dest;
f345f21a 1439
bc35512f 1440 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 1441 return NULL;
bc35512f 1442
3371a64f 1443 if (dest == target)
6de9cd9a 1444 return e;
3348b696 1445
6de9cd9a 1446 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
f345f21a 1447 {
6fb5fa3c 1448 df_set_bb_dirty (src);
3371a64f 1449 fixup_partition_crossing (ret);
6de9cd9a 1450 return ret;
f345f21a 1451 }
bc35512f 1452
6de9cd9a
DN
1453 ret = redirect_branch_edge (e, target);
1454 if (!ret)
1455 return NULL;
5f0d2358 1456
6fb5fa3c 1457 df_set_bb_dirty (src);
3371a64f 1458 fixup_partition_crossing (ret);
6de9cd9a 1459 return ret;
ca6c03ca
JH
1460}
1461
3371a64f
TJ
1462/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1463
1464void
1465emit_barrier_after_bb (basic_block bb)
1466{
cd459bf8 1467 rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
c3284718 1468 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
3371a64f
TJ
1469 || current_ir_type () == IR_RTL_CFGLAYOUT);
1470 if (current_ir_type () == IR_RTL_CFGLAYOUT)
0fb3402f
RX
1471 {
1472 rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1473
1474 if (BB_FOOTER (bb))
1475 {
1476 rtx_insn *footer_tail = BB_FOOTER (bb);
1477
1478 while (NEXT_INSN (footer_tail))
1479 footer_tail = NEXT_INSN (footer_tail);
1480 if (!BARRIER_P (footer_tail))
1481 {
1482 SET_NEXT_INSN (footer_tail) = insn;
1483 SET_PREV_INSN (insn) = footer_tail;
1484 }
1485 }
1486 else
1487 BB_FOOTER (bb) = insn;
1488 }
3371a64f
TJ
1489}
1490
4fe9b91c 1491/* Like force_nonfallthru below, but additionally performs redirection
26898771
BS
1492 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1493 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1494 simple_return_rtx, indicating which kind of returnjump to create.
1495 It should be NULL otherwise. */
ca6c03ca 1496
dc0ff1c8 1497basic_block
26898771 1498force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
ca6c03ca 1499{
a3716585 1500 basic_block jump_block, new_bb = NULL, src = e->src;
ca6c03ca
JH
1501 rtx note;
1502 edge new_edge;
a3716585 1503 int abnormal_edge_flags = 0;
a3e6a37b 1504 bool asm_goto_edge = false;
7241571e 1505 int loc;
ca6c03ca 1506
cb9a1d9b
JH
1507 /* In the case the last instruction is conditional jump to the next
1508 instruction, first redirect the jump itself and then continue
b20b352b 1509 by creating a basic block afterwards to redirect fallthru edge. */
fefa31b5
DM
1510 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1511 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
a813c111 1512 && any_condjump_p (BB_END (e->src))
a813c111 1513 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
cb9a1d9b
JH
1514 {
1515 rtx note;
58e6ae30 1516 edge b = unchecked_make_edge (e->src, target, 0);
341c100f 1517 bool redirected;
cb9a1d9b 1518
1476d1bd
MM
1519 redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1520 block_label (target), 0);
341c100f 1521 gcc_assert (redirected);
c22cacf3 1522
a813c111 1523 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
cb9a1d9b
JH
1524 if (note)
1525 {
e5af9ddd 1526 int prob = XINT (note, 0);
cb9a1d9b 1527
5fa396ad 1528 b->probability = profile_probability::from_reg_br_prob_note (prob);
cb9a1d9b 1529 e->probability -= e->probability;
cb9a1d9b
JH
1530 }
1531 }
1532
ca6c03ca 1533 if (e->flags & EDGE_ABNORMAL)
a3716585
JH
1534 {
1535 /* Irritating special case - fallthru edge to the same block as abnormal
1536 edge.
1537 We can't redirect abnormal edge, but we still can split the fallthru
d329e058 1538 one and create separate abnormal edge to original destination.
a3716585 1539 This allows bb-reorder to make such edge non-fallthru. */
341c100f 1540 gcc_assert (e->dest == target);
ee44c28d
SB
1541 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1542 e->flags &= EDGE_FALLTHRU;
a3716585 1543 }
341c100f 1544 else
24c545ff 1545 {
341c100f 1546 gcc_assert (e->flags & EDGE_FALLTHRU);
fefa31b5 1547 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
341c100f
NS
1548 {
1549 /* We can't redirect the entry block. Create an empty block
628f6a4e
BE
1550 at the start of the function which we use to add the new
1551 jump. */
1552 edge tmp;
1553 edge_iterator ei;
1554 bool found = false;
c22cacf3 1555
fefa31b5
DM
1556 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1557 ENTRY_BLOCK_PTR_FOR_FN (cfun));
e7a74006 1558 bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
c22cacf3 1559
63e1159c
JJ
1560 /* Make sure new block ends up in correct hot/cold section. */
1561 BB_COPY_PARTITION (bb, e->dest);
1562
341c100f
NS
1563 /* Change the existing edge's source to be the new block, and add
1564 a new edge from the entry block to the new block. */
1565 e->src = bb;
fefa31b5
DM
1566 for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1567 (tmp = ei_safe_edge (ei)); )
628f6a4e
BE
1568 {
1569 if (tmp == e)
1570 {
fefa31b5 1571 ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
628f6a4e
BE
1572 found = true;
1573 break;
1574 }
1575 else
1576 ei_next (&ei);
1577 }
c22cacf3 1578
628f6a4e 1579 gcc_assert (found);
c22cacf3 1580
9771b263 1581 vec_safe_push (bb->succs, e);
fefa31b5
DM
1582 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1583 EDGE_FALLTHRU);
341c100f 1584 }
24c545ff
BS
1585 }
1586
a3e6a37b 1587 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
891ca07d 1588 don't point to the target or fallthru label. */
a3e6a37b 1589 if (JUMP_P (BB_END (e->src))
fefa31b5 1590 && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
a3e6a37b
JJ
1591 && (e->flags & EDGE_FALLTHRU)
1592 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
ca6c03ca 1593 {
a3e6a37b 1594 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
5d68b7e6 1595 bool adjust_jump_target = false;
a3e6a37b
JJ
1596
1597 for (i = 0; i < n; ++i)
891ca07d
JJ
1598 {
1599 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
5d68b7e6
JJ
1600 {
1601 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1602 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1603 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1604 adjust_jump_target = true;
1605 }
891ca07d 1606 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
a3e6a37b 1607 asm_goto_edge = true;
891ca07d 1608 }
5d68b7e6
JJ
1609 if (adjust_jump_target)
1610 {
8879d71c
DM
1611 rtx_insn *insn = BB_END (e->src);
1612 rtx note;
1613 rtx_insn *old_label = BB_HEAD (e->dest);
1614 rtx_insn *new_label = BB_HEAD (target);
5d68b7e6
JJ
1615
1616 if (JUMP_LABEL (insn) == old_label)
1617 {
1618 JUMP_LABEL (insn) = new_label;
1619 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1620 if (note)
1621 remove_note (insn, note);
1622 }
1623 else
1624 {
1625 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1626 if (note)
1627 remove_note (insn, note);
1628 if (JUMP_LABEL (insn) != new_label
1629 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1630 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1631 }
1632 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1633 != NULL_RTX)
1634 XEXP (note, 0) = new_label;
1635 }
a3e6a37b
JJ
1636 }
1637
1638 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1639 {
dc01c3d1 1640 rtx_insn *new_head;
ef30ab83 1641 profile_count count = e->count ();
357067f2 1642 profile_probability probability = e->probability;
ca6c03ca 1643 /* Create the new structures. */
31a78298 1644
79019985
RH
1645 /* If the old block ended with a tablejump, skip its table
1646 by searching forward from there. Otherwise start searching
1647 forward from the last instruction of the old block. */
8942ee0f
DM
1648 rtx_jump_table_data *table;
1649 if (tablejump_p (BB_END (e->src), NULL, &table))
dc01c3d1 1650 new_head = table;
8942ee0f 1651 else
dc01c3d1
DM
1652 new_head = BB_END (e->src);
1653 new_head = NEXT_INSN (new_head);
31a78298 1654
dc01c3d1 1655 jump_block = create_basic_block (new_head, NULL, e->src);
a3e6a37b 1656 jump_block->count = count;
ca6c03ca 1657
750054a2
CT
1658 /* Make sure new block ends up in correct hot/cold section. */
1659
076c7ab8 1660 BB_COPY_PARTITION (jump_block, e->src);
c22cacf3 1661
ca6c03ca
JH
1662 /* Wire edge in. */
1663 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
a3e6a37b 1664 new_edge->probability = probability;
ca6c03ca
JH
1665
1666 /* Redirect old edge. */
1667 redirect_edge_pred (e, jump_block);
357067f2 1668 e->probability = profile_probability::always ();
ca6c03ca 1669
3371a64f
TJ
1670 /* If e->src was previously region crossing, it no longer is
1671 and the reg crossing note should be removed. */
1672 fixup_partition_crossing (new_edge);
1673
a3e6a37b
JJ
1674 /* If asm goto has any label refs to target's label,
1675 add also edge from asm goto bb to target. */
1676 if (asm_goto_edge)
1677 {
357067f2 1678 new_edge->probability = new_edge->probability.apply_scale (1, 2);
3995f3a2 1679 jump_block->count = jump_block->count.apply_scale (1, 2);
3995f3a2
JH
1680 edge new_edge2 = make_edge (new_edge->src, target,
1681 e->flags & ~EDGE_FALLTHRU);
1682 new_edge2->probability = probability - new_edge->probability;
a3e6a37b
JJ
1683 }
1684
ca6c03ca
JH
1685 new_bb = jump_block;
1686 }
1687 else
1688 jump_block = e->src;
5f0d2358 1689
2f13f2de 1690 loc = e->goto_locus;
ca6c03ca 1691 e->flags &= ~EDGE_FALLTHRU;
fefa31b5 1692 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
ca6c03ca 1693 {
26898771 1694 if (jump_label == ret_rtx)
4bccb39e
RS
1695 emit_jump_insn_after_setloc (targetm.gen_return (),
1696 BB_END (jump_block), loc);
26898771
BS
1697 else
1698 {
1699 gcc_assert (jump_label == simple_return_rtx);
4bccb39e 1700 emit_jump_insn_after_setloc (targetm.gen_simple_return (),
26898771 1701 BB_END (jump_block), loc);
26898771 1702 }
387748de 1703 set_return_jump_label (BB_END (jump_block));
ca6c03ca
JH
1704 }
1705 else
1706 {
e67d1102 1707 rtx_code_label *label = block_label (target);
ec4a505f
RS
1708 emit_jump_insn_after_setloc (targetm.gen_jump (label),
1709 BB_END (jump_block), loc);
a813c111 1710 JUMP_LABEL (BB_END (jump_block)) = label;
ca6c03ca
JH
1711 LABEL_NUSES (label)++;
1712 }
5f0d2358 1713
3371a64f
TJ
1714 /* We might be in cfg layout mode, and if so, the following routine will
1715 insert the barrier correctly. */
1716 emit_barrier_after_bb (jump_block);
ca6c03ca
JH
1717 redirect_edge_succ_nodup (e, target);
1718
a3716585
JH
1719 if (abnormal_edge_flags)
1720 make_edge (src, target, abnormal_edge_flags);
1721
b8698a0f 1722 df_mark_solutions_dirty ();
3371a64f 1723 fixup_partition_crossing (e);
ca6c03ca
JH
1724 return new_bb;
1725}
1726
1727/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1728 (and possibly create new basic block) to make edge non-fallthru.
1729 Return newly created BB or NULL if none. */
5f0d2358 1730
cf103ca4
EB
1731static basic_block
1732rtl_force_nonfallthru (edge e)
ca6c03ca 1733{
26898771 1734 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
ca6c03ca
JH
1735}
1736
1737/* Redirect edge even at the expense of creating new jump insn or
1738 basic block. Return new basic block if created, NULL otherwise.
41806d92 1739 Conversion must be possible. */
ca6c03ca 1740
9ee634e3 1741static basic_block
d329e058 1742rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1743{
5f0d2358
RK
1744 if (redirect_edge_and_branch (e, target)
1745 || e->dest == target)
ca6c03ca
JH
1746 return NULL;
1747
1748 /* In case the edge redirection failed, try to force it to be non-fallthru
1749 and redirect newly created simplejump. */
6fb5fa3c 1750 df_set_bb_dirty (e->src);
26898771 1751 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1752}
1753
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

      /* Step back to the insn preceding the jump: everything strictly
	 after Q up to the head of C is unlinked below, which removes
	 the jump itself along with any barriers.  */
      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
ca6c03ca 1821\f
f470c378
ZD
/* Should move basic block BB after basic block AFTER.  NIY (not implemented
   yet): this cfg hook always reports failure in plain RTL mode, so callers
   must fall back to other block-reordering strategies.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
1830
3371a64f
TJ
/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  /* Walk the block chain forward; stop at the block whose chain successor
     is in a different partition than START_BB.  */
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}
1845
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* Edges out of the entry block: place the new block right in
	     front of the destination and give it the destination's
	     partition.  */
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = get_last_bb_insn (after);
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && (extract_asm_operands (PATTERN (last))
		  || JUMP_LABEL (last) == before)
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1961
1962/* Queue instructions for insertion on an edge between two basic blocks.
1963 The new instructions and basic blocks (if any) will not appear in the
1964 CFG until commit_edge_insertions is called. */
1965
1966void
d329e058 1967insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1968{
1969 /* We cannot insert instructions on an abnormal critical edge.
1970 It will be easier to find the culprit if we die now. */
341c100f 1971 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1972
6de9cd9a 1973 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1974 start_sequence ();
1975 else
6de9cd9a 1976 push_to_sequence (e->insns.r);
ca6c03ca
JH
1977
1978 emit_insn (pattern);
1979
6de9cd9a 1980 e->insns.r = get_insns ();
ca6c03ca
JH
1981 end_sequence ();
1982}
1983
/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      /* LAST is the final insn of the inserted sequence.  */
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
2092
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks.  This will cause the verification below to fail,
     and lead to now cold code in the hot section.  In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  if (!currently_expanding_to_rtl)
    checking_verify_flow_info ();

  /* Walk every block including the entry block, since edges out of the
     entry block may also carry queued insns.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    /* During expansion the jump label fields of the queued insns
	       have not been filled in yet; do that before committing.  */
	    if (currently_expanding_to_rtl)
	      rebuild_jump_labels_chain (e->insns.r);
	    commit_one_edge_insertion (e);
	  }
    }
}
2126\f
6fb5fa3c 2127
f470c378 2128/* Print out RTL-specific basic block information (live information
a315c44c
SB
2129 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2130 documented in dumpfile.h. */
ca6c03ca 2131
10e9fecc 2132static void
1a817418 2133rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
ca6c03ca 2134{
f470c378 2135 char *s_indent;
ca6c03ca 2136
ae50c0cb 2137 s_indent = (char *) alloca ((size_t) indent + 1);
400e39e3 2138 memset (s_indent, ' ', (size_t) indent);
f470c378 2139 s_indent[indent] = '\0';
b8698a0f 2140
a315c44c 2141 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2142 {
2143 df_dump_top (bb, outf);
2144 putc ('\n', outf);
2145 }
ca6c03ca 2146
68cc3174 2147 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
9fcaf059
SB
2148 {
2149 rtx_insn *last = BB_END (bb);
2150 if (last)
2151 last = NEXT_INSN (last);
2152 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2153 {
2154 if (flags & TDF_DETAILS)
2155 df_dump_insn_top (insn, outf);
2156 if (! (flags & TDF_SLIM))
2157 print_rtl_single (outf, insn);
2158 else
2159 dump_insn_slim (outf, insn);
2160 if (flags & TDF_DETAILS)
2161 df_dump_insn_bottom (insn, outf);
2162 }
2163 }
a315c44c
SB
2164
2165 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2166 {
2167 df_dump_bottom (bb, outf);
2168 putc ('\n', outf);
2169 }
2170
ca6c03ca
JH
2171}
2172\f
c4669594
SB
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by INSN_UID: the block an insn starts/ends, and how many
	 blocks claim it.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      /* First pass: record block boundaries and membership per insn.  */
      if (cfun->curr_properties & PROP_cfg)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      if (flags & TDF_BLOCKS)
		{
		  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		    {
		      enum bb_state state = IN_MULTIPLE_BB;

		      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
			state = IN_ONE_BB;
		      in_bb_p[INSN_UID (x)] = state;

		      if (x == BB_END (bb))
			break;
		    }
		}
	    }
	}

      /* Second pass: print every insn, bracketing it with block info.  */
      for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  bb = end[INSN_UID (tmp_rtx)];
	  if (bb != NULL)
	    {
	      if (flags & TDF_BLOCKS)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      /* Emit a hint if the fallthrough target of current basic block
		 isn't the one placed right next.  */
	      else if (EDGE_COUNT (bb->succs) > 0)
		{
		  gcc_assert (BB_END (bb) == tmp_rtx);
		  const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
		  /* Bypass intervening deleted-insn notes and debug insns.  */
		  while (ninsn
			 && !NONDEBUG_INSN_P (ninsn)
			 && !start[INSN_UID (ninsn)])
		    ninsn = NEXT_INSN (ninsn);
		  edge e = find_fallthru_edge (bb->succs);
		  if (e && ninsn)
		    {
		      basic_block dest = e->dest;
		      if (start[INSN_UID (ninsn)] != dest)
			fprintf (outf, "%s ; pc falls through to BB %d\n",
				 print_rtx_head, dest->index);
		    }
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2293\f
532aafad
SB
/* Update the branch probability of BB if a REG_BR_PROB is present.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
    {
      /* The CFG carries no usable probability for the branch edge; a
	 stale REG_BR_PROB note would be misleading, so unlink it from
	 the insn's note list by hand.  */
      if (note)
	{
	  rtx *note_link, this_rtx;

	  note_link = &REG_NOTES (BB_END (bb));
	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
	    if (this_rtx == note)
	      {
		*note_link = XEXP (this_rtx, 1);
		break;
	      }
	}
      return;
    }
  /* Nothing to do when there is no note or it already matches the edge
     probability.  */
  if (!note
      || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
    return;
  XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
}
96370780
MK
2322
2323/* Get the last insn associated with block BB (that includes barriers and
2324 tablejumps after BB). */
636eb204 2325rtx_insn *
96370780
MK
2326get_last_bb_insn (basic_block bb)
2327{
8942ee0f 2328 rtx_jump_table_data *table;
636eb204
DM
2329 rtx_insn *tmp;
2330 rtx_insn *end = BB_END (bb);
96370780
MK
2331
2332 /* Include any jump table following the basic block. */
8942ee0f
DM
2333 if (tablejump_p (end, NULL, &table))
2334 end = table;
96370780
MK
2335
2336 /* Include any barriers that may follow the basic block. */
f40dd646 2337 tmp = next_nonnote_nondebug_insn_bb (end);
96370780
MK
2338 while (tmp && BARRIER_P (tmp))
2339 {
2340 end = tmp;
f40dd646 2341 tmp = next_nonnote_nondebug_insn_bb (end);
96370780
MK
2342 }
2343
2344 return end;
2345}
af205f67 2346
d9af4fea
JH
2347/* Add all BBs reachable from entry via hot paths into the SET. */
2348
2349void
2350find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2351{
2352 auto_vec<basic_block, 64> worklist;
2353
2354 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2355 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2356
2357 while (worklist.length () > 0)
2358 {
2359 basic_block bb = worklist.pop ();
2360 edge_iterator ei;
2361 edge e;
2362
2363 FOR_EACH_EDGE (e, ei, bb->succs)
2364 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2365 && !set->add (e->dest))
2366 worklist.safe_push (e->dest);
2367 }
2368}
2369
600b5b1d
TJ
2370/* Sanity check partition hotness to ensure that basic blocks in
2371   the cold partition don't dominate basic blocks in the hot partition.
2372 If FLAG_ONLY is true, report violations as errors. Otherwise
2373 re-mark the dominated blocks as cold, since this is run after
2374 cfg optimizations that may make hot blocks previously reached
2375 by both hot and cold blocks now only reachable along cold paths. */
2376
2377static vec<basic_block>
2378find_partition_fixes (bool flag_only)
2379{
2380 basic_block bb;
2381 vec<basic_block> bbs_in_cold_partition = vNULL;
2382 vec<basic_block> bbs_to_fix = vNULL;
d9af4fea 2383 hash_set<basic_block> set;
600b5b1d
TJ
2384
2385 /* Callers check this. */
2386 gcc_checking_assert (crtl->has_bb_partition);
2387
d9af4fea 2388 find_bbs_reachable_by_hot_paths (&set);
600b5b1d 2389
d9af4fea
JH
2390 FOR_EACH_BB_FN (bb, cfun)
2391 if (!set.contains (bb)
2392 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2393 {
2394 if (flag_only)
2395 error ("non-cold basic block %d reachable only "
2396 "by paths crossing the cold partition", bb->index);
2397 else
2398 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2399 bbs_to_fix.safe_push (bb);
2400 bbs_in_cold_partition.safe_push (bb);
2401 }
600b5b1d
TJ
2402
2403 return bbs_to_fix;
2404}
2405
2406/* Perform cleanup on the hot/cold bb partitioning after optimization
2407 passes that modify the cfg. */
2408
2409void
2410fixup_partitions (void)
2411{
2412 basic_block bb;
2413
2414 if (!crtl->has_bb_partition)
2415 return;
2416
2417 /* Delete any blocks that became unreachable and weren't
2418 already cleaned up, for example during edge forwarding
2419 and convert_jumps_to_returns. This will expose more
2420 opportunities for fixing the partition boundaries here.
2421 Also, the calculation of the dominance graph during verification
2422 will assert if there are unreachable nodes. */
2423 delete_unreachable_blocks ();
2424
2425 /* If there are partitions, do a sanity check on them: A basic block in
2426   a cold partition cannot dominate a basic block in a hot partition.
2427 Fixup any that now violate this requirement, as a result of edge
2428 forwarding and unreachable block deletion.  */
2429 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2430
2431 /* Do the partition fixup after all necessary blocks have been converted to
2432 cold, so that we only update the region crossings the minimum number of
2433 places, which can require forcing edges to be non fallthru. */
2434 while (! bbs_to_fix.is_empty ())
2435 {
2436 bb = bbs_to_fix.pop ();
2437 fixup_new_cold_bb (bb);
2438 }
2439}
2440
af205f67
TJ
2441/* Verify, in the basic block chain, that there is at most one switch
2442 between hot/cold partitions. This condition will not be true until
2443 after reorder_basic_blocks is called. */
2444
251a41b9 2445static int
af205f67
TJ
2446verify_hot_cold_block_grouping (void)
2447{
2448 basic_block bb;
2449 int err = 0;
2450 bool switched_sections = false;
2451 int current_partition = BB_UNPARTITIONED;
2452
3371a64f
TJ
2453 /* Even after bb reordering is complete, we go into cfglayout mode
2454 again (in compgoto). Ensure we don't call this before going back
2455 into linearized RTL when any layout fixes would have been committed. */
2456 if (!crtl->bb_reorder_complete
c3284718 2457 || current_ir_type () != IR_RTL_CFGRTL)
251a41b9 2458 return err;
af205f67 2459
11cd3bed 2460 FOR_EACH_BB_FN (bb, cfun)
af205f67
TJ
2461 {
2462 if (current_partition != BB_UNPARTITIONED
2463 && BB_PARTITION (bb) != current_partition)
2464 {
2465 if (switched_sections)
2466 {
2467 error ("multiple hot/cold transitions found (bb %i)",
2468 bb->index);
2469 err = 1;
2470 }
2471 else
2472 switched_sections = true;
2473
2474 if (!crtl->has_bb_partition)
2475 error ("partition found but function partition flag not set");
2476 }
2477 current_partition = BB_PARTITION (bb);
2478 }
2479
251a41b9 2480 return err;
af205f67 2481}
b446e5a2 2482\f
ca6c03ca 2483
251a41b9
TJ
2484/* Perform several checks on the edges out of each block, such as
2485 the consistency of the branch probabilities, the correctness
2486 of hot/cold partition crossing edges, and the number of expected
600b5b1d
TJ
2487 successor edges. Also verify that the dominance relationship
2488 between hot/cold blocks is sane. */
f470c378 2489
10e9fecc 2490static int
251a41b9 2491rtl_verify_edges (void)
ca6c03ca 2492{
10e9fecc 2493 int err = 0;
94eb5ddb 2494 basic_block bb;
ca6c03ca 2495
4f42035e 2496 FOR_EACH_BB_REVERSE_FN (bb, cfun)
ca6c03ca 2497 {
f99ffaa3
EB
2498 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2499 int n_eh = 0, n_abnormal = 0;
3cf54412 2500 edge e, fallthru = NULL;
628f6a4e 2501 edge_iterator ei;
251a41b9 2502 rtx note;
3371a64f 2503 bool has_crossing_edge = false;
ca6c03ca 2504
2085a21f 2505 if (JUMP_P (BB_END (bb))
a813c111 2506 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
628f6a4e 2507 && EDGE_COUNT (bb->succs) >= 2
a813c111 2508 && any_condjump_p (BB_END (bb)))
5a1a3e5e 2509 {
357067f2
JH
2510 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2511 {
a4c57f86 2512 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
357067f2 2513 {
a4c57f86
JH
2514 error ("verify_flow_info: "
2515 "REG_BR_PROB is set but cfg probability is not");
2516 err = 1;
357067f2
JH
2517 }
2518 }
2519 else if (XINT (note, 0)
5fa396ad 2520 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
357067f2 2521 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
5a1a3e5e 2522 {
e5af9ddd 2523 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
357067f2 2524 XINT (note, 0),
5fa396ad 2525 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
5a1a3e5e
JH
2526 err = 1;
2527 }
2528 }
251a41b9 2529
628f6a4e 2530 FOR_EACH_EDGE (e, ei, bb->succs)
ca6c03ca 2531 {
0be7e7a6
RH
2532 bool is_crossing;
2533
ca6c03ca 2534 if (e->flags & EDGE_FALLTHRU)
0be7e7a6
RH
2535 n_fallthru++, fallthru = e;
2536
2537 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
fefa31b5
DM
2538 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2539 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
3371a64f 2540 has_crossing_edge |= is_crossing;
0be7e7a6 2541 if (e->flags & EDGE_CROSSING)
750054a2 2542 {
0be7e7a6
RH
2543 if (!is_crossing)
2544 {
2545 error ("EDGE_CROSSING incorrectly set across same section");
2546 err = 1;
2547 }
2548 if (e->flags & EDGE_FALLTHRU)
2549 {
f99ffaa3 2550 error ("fallthru edge crosses section boundary in bb %i",
750054a2
CT
2551 e->src->index);
2552 err = 1;
2553 }
0be7e7a6
RH
2554 if (e->flags & EDGE_EH)
2555 {
f99ffaa3 2556 error ("EH edge crosses section boundary in bb %i",
0be7e7a6
RH
2557 e->src->index);
2558 err = 1;
2559 }
339ba33b 2560 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
3371a64f
TJ
2561 {
2562 error ("No region crossing jump at section boundary in bb %i",
2563 bb->index);
2564 err = 1;
2565 }
0be7e7a6
RH
2566 }
2567 else if (is_crossing)
2568 {
2569 error ("EDGE_CROSSING missing across section boundary");
2570 err = 1;
750054a2 2571 }
3dec4024 2572
65f43cdf
ZD
2573 if ((e->flags & ~(EDGE_DFS_BACK
2574 | EDGE_CAN_FALLTHRU
2575 | EDGE_IRREDUCIBLE_LOOP
9beb1c84 2576 | EDGE_LOOP_EXIT
7e872b90
JJ
2577 | EDGE_CROSSING
2578 | EDGE_PRESERVE)) == 0)
3dec4024
JH
2579 n_branch++;
2580
2581 if (e->flags & EDGE_ABNORMAL_CALL)
f99ffaa3
EB
2582 n_abnormal_call++;
2583
2584 if (e->flags & EDGE_SIBCALL)
2585 n_sibcall++;
3dec4024
JH
2586
2587 if (e->flags & EDGE_EH)
2588 n_eh++;
f99ffaa3
EB
2589
2590 if (e->flags & EDGE_ABNORMAL)
3dec4024 2591 n_abnormal++;
ca6c03ca 2592 }
5f0d2358 2593
d53e8ef4
JJ
2594 if (!has_crossing_edge
2595 && JUMP_P (BB_END (bb))
2596 && CROSSING_JUMP_P (BB_END (bb)))
2597 {
2598 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2599 error ("Region crossing jump across same section in bb %i",
2600 bb->index);
2601 err = 1;
2602 }
3371a64f 2603
1d65f45c 2604 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
3dec4024 2605 {
f99ffaa3 2606 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
3dec4024
JH
2607 err = 1;
2608 }
1d65f45c
RH
2609 if (n_eh > 1)
2610 {
f99ffaa3 2611 error ("too many exception handling edges in bb %i", bb->index);
1d65f45c
RH
2612 err = 1;
2613 }
3dec4024 2614 if (n_branch
4b4bf941 2615 && (!JUMP_P (BB_END (bb))
a813c111
SB
2616 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2617 || any_condjump_p (BB_END (bb))))))
3dec4024 2618 {
ab532386 2619 error ("too many outgoing branch edges from bb %i", bb->index);
3dec4024
JH
2620 err = 1;
2621 }
a813c111 2622 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
3dec4024 2623 {
f99ffaa3 2624 error ("fallthru edge after unconditional jump in bb %i", bb->index);
3dec4024
JH
2625 err = 1;
2626 }
a813c111 2627 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
3dec4024 2628 {
f99ffaa3
EB
2629 error ("wrong number of branch edges after unconditional jump"
2630 " in bb %i", bb->index);
3dec4024
JH
2631 err = 1;
2632 }
a813c111 2633 if (n_branch != 1 && any_condjump_p (BB_END (bb))
c11fd0b2 2634 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
3dec4024 2635 {
f99ffaa3
EB
2636 error ("wrong amount of branch edges after conditional jump"
2637 " in bb %i", bb->index);
2638 err = 1;
2639 }
2640 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2641 {
2642 error ("abnormal call edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2643 err = 1;
2644 }
f99ffaa3 2645 if (n_sibcall && !CALL_P (BB_END (bb)))
3dec4024 2646 {
f99ffaa3 2647 error ("sibcall edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2648 err = 1;
2649 }
f99ffaa3
EB
2650 if (n_abnormal > n_eh
2651 && !(CALL_P (BB_END (bb))
2652 && n_abnormal == n_abnormal_call + n_sibcall)
4b4bf941 2653 && (!JUMP_P (BB_END (bb))
a813c111
SB
2654 || any_condjump_p (BB_END (bb))
2655 || any_uncondjump_p (BB_END (bb))))
3dec4024 2656 {
ab532386 2657 error ("abnormal edges for no purpose in bb %i", bb->index);
3dec4024
JH
2658 err = 1;
2659 }
d53e8ef4
JJ
2660
2661 int has_eh = -1;
2662 FOR_EACH_EDGE (e, ei, bb->preds)
2663 {
2664 if (has_eh == -1)
2665 has_eh = (e->flags & EDGE_EH);
2666 if ((e->flags & EDGE_EH) == has_eh)
2667 continue;
2668 error ("EH incoming edge mixed with non-EH incoming edges "
2669 "in bb %i", bb->index);
2670 err = 1;
2671 break;
2672 }
251a41b9 2673 }
f87c27b4 2674
600b5b1d
TJ
2675 /* If there are partitions, do a sanity check on them: A basic block in
2676   a cold partition cannot dominate a basic block in a hot partition.  */
3211aea2
JH
2677 if (crtl->has_bb_partition && !err
2678 && current_ir_type () == IR_RTL_CFGLAYOUT)
600b5b1d
TJ
2679 {
2680 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2681 err = !bbs_to_fix.is_empty ();
2682 }
2683
251a41b9
TJ
2684 /* Clean up. */
2685 return err;
2686}
5f0d2358 2687
251a41b9
TJ
/* Checks on the instructions within blocks.  Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A label alone cannot be the whole block: the basic block
	     note must follow.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    /* Any insn that can transfer control must terminate its block;
	       finding one mid-block is unrecoverable.  */
	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
2751
/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately.  Every block must have BB_RTL set, every
   insn inside a block must point back at that block, and insns kept in the
   cfglayout header/footer chains must have a NULL block pointer (barriers
   excepted).  Returns non-zero on any diagnosed error.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      /* Each insn in the block body must have its BLOCK_FOR_INSN
	 back-pointer set to this block.  */
      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header and footer insns live outside any block, so their block
	 pointer must be NULL; barriers are exempt from the check.  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2803
10e9fecc
JH
2804/* Verify the CFG and RTL consistency common for both underlying RTL and
2805 cfglayout RTL.
5f0d2358 2806
10e9fecc 2807 Currently it does following checks:
251a41b9
TJ
2808
2809 - overlapping of basic blocks
2810 - insns with wrong BLOCK_FOR_INSN pointers
2811 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2812 - tails of basic blocks (ensure that boundary is necessary)
2813 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2814 and NOTE_INSN_BASIC_BLOCK
2815 - verify that no fall_thru edge crosses hot/cold partition boundaries
2816 - verify that there are no pending RTL branch predictions
600b5b1d 2817 - verify that hot blocks are not dominated by cold blocks
251a41b9
TJ
2818
2819 In future it can be extended check a lot of other stuff as well
2820 (reachability of basic blocks, life information, etc. etc.). */
9eab6785 2821
10e9fecc 2822static int
251a41b9
TJ
2823rtl_verify_flow_info_1 (void)
2824{
2825 int err = 0;
2826
2827 err |= rtl_verify_bb_pointers ();
2828
2829 err |= rtl_verify_bb_insns ();
2830
2831 err |= rtl_verify_edges ();
2832
251a41b9
TJ
2833 return err;
2834}
2835
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.  Returns non-zero on any diagnosed error.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  /* Maps INSN_UID -> containing block, to detect insns claimed by
     more than one basic block.  */
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  /* Blocks are visited in reverse order while LAST_HEAD walks the insn
     chain backwards, so the insns between one block's head and the
     previous block's end are the "outside any block" gaps.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* NOTE(review): if X is NULL here (head not found), PREV_INSN (x)
	 dereferences a null insn -- looks reachable only on an already
	 badly corrupted chain; confirm whether this path can trigger.  */
      last_head = PREV_INSN (x);
    }

  /* Whatever precedes the first block's head must also have a NULL
     block pointer (again, barriers excepted).  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2924
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  Returns non-zero on any
   diagnosed error (a stray insn on a fallthru path is fatal).  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Running off the chain or into the next block's note
		 before seeing a barrier is an error.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  /* A fallthru edge between real blocks must go to the next
	     block in layout order ...  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* ... and the gap between the two blocks may contain only
	       insns that don't execute (no barriers, no real insns).  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
2983
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  Layout violations
   are fatal (internal_error/fatal_insn); the int return mirrors the other
   rtl_verify_* helpers.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x, *y;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  /* CURR_BB is the block whose body we are currently inside, or NULL
     when between blocks.  */
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  /* Between blocks, only barriers, notes, and labels (plus a
	     jump table hanging off a label) may appear.  */
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* A non-conditional return must be immediately followed by a
	 barrier (ignoring notes and debug insns).  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! ((y = next_nonnote_nondebug_insn (x))
		&& BARRIER_P (y)))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

   return err;
}
251a41b9
TJ
3052
3053/* Verify the CFG and RTL consistency common for both underlying RTL and
3054 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3055
3056 Currently it does following checks:
3057 - all checks of rtl_verify_flow_info_1
3058 - test head/end pointers
3059 - check that blocks are laid out in consecutive order
3060 - check that all insns are in the basic blocks
3061 (except the switch handling code, barriers and notes)
3062 - check that all returns are followed by barriers
600b5b1d
TJ
3063 - check that all fallthru edge points to the adjacent blocks
3064 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
3065
3066static int
3067rtl_verify_flow_info (void)
3068{
3069 int err = 0;
3070
3071 err |= rtl_verify_flow_info_1 ();
3072
3073 err |= rtl_verify_bb_insn_chain ();
3074
3075 err |= rtl_verify_fallthru ();
3076
3077 err |= rtl_verify_bb_layout ();
3078
3371a64f
TJ
3079 err |= verify_hot_cold_block_grouping ();
3080
251a41b9
TJ
3081 return err;
3082}
ca6c03ca 3083\f
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns and notes so INSN is the last insn that
     can actually end the block (unless the block is all-debug).  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      /* Drop every successor edge the jump no longer justifies.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = profile_probability::always ();
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = profile_probability::from_reg_br_prob_note
			     (XINT (note, 0));
	  f->probability = b->probability.invert ();
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = profile_probability::always ();

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
3301
5f0d2358
RK
3302/* Search all basic blocks for potentially dead edges and purge them. Return
3303 true if some edge has been eliminated. */
ca6c03ca
JH
3304
3305bool
25cd19de 3306purge_all_dead_edges (void)
ca6c03ca 3307{
e0082a72 3308 int purged = false;
e0082a72 3309 basic_block bb;
473fb060 3310
11cd3bed 3311 FOR_EACH_BB_FN (bb, cfun)
473fb060 3312 {
e0082a72 3313 bool purged_here = purge_dead_edges (bb);
5f0d2358 3314
473fb060 3315 purged |= purged_here;
473fb060 3316 }
5f0d2358 3317
ca6c03ca
JH
3318 return purged;
3319}
9ee634e3 3320
ba5e9aca
EB
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the block has an abnormal edge but its last insn
	 is not itself the call/trapping insn -- i.e. something was
	 emitted after the real block end.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/trapping insn; the insns
		 after it are moved to the fallthru edge below.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.
			 Similarly, sometimes a call that can throw is
			 followed in the source with __builtin_unreachable (),
			 meaning that there is UB if the call returns rather
			 than throws.  If there weren't any instructions
			 following such calls before, supposedly even the ones
			 we've deleted aren't significant and can be
			 removed.  */
		      if (e)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
78bde837
SB
3416\f
3417/* Cut the insns from FIRST to LAST out of the insns stream. */
3418
a90c614a 3419rtx_insn *
9152e0aa 3420unlink_insn_chain (rtx_insn *first, rtx_insn *last)
78bde837 3421{
a90c614a
DM
3422 rtx_insn *prevfirst = PREV_INSN (first);
3423 rtx_insn *nextlast = NEXT_INSN (last);
78bde837 3424
0f82e5c9
DM
3425 SET_PREV_INSN (first) = NULL;
3426 SET_NEXT_INSN (last) = NULL;
78bde837 3427 if (prevfirst)
0f82e5c9 3428 SET_NEXT_INSN (prevfirst) = nextlast;
78bde837 3429 if (nextlast)
0f82e5c9 3430 SET_PREV_INSN (nextlast) = prevfirst;
78bde837
SB
3431 else
3432 set_last_insn (prevfirst);
3433 if (!prevfirst)
3434 set_first_insn (nextlast);
9152e0aa 3435 return first;
78bde837
SB
3436}
3437\f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx_insn *
skip_insns_after_block (basic_block bb)
{
  rtx_insn *insn, *last_insn, *next_head, *prev;

  /* NEXT_HEAD marks where the next block begins; never skip past it.  */
  next_head = NULL;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers belong to the preceding block; keep skipping.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Other notes are skipped but do not extend LAST_INSN.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by its jump table is part of
	     the preceding block's tail; take both.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Move the stranded note past the insns we are keeping.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
3521
3522/* Locate or create a label for a given basic block. */
3523
e67d1102 3524static rtx_insn *
78bde837
SB
3525label_for_bb (basic_block bb)
3526{
e67d1102 3527 rtx_insn *label = BB_HEAD (bb);
78bde837
SB
3528
3529 if (!LABEL_P (label))
3530 {
3531 if (dump_file)
3532 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3533
3534 label = block_label (bb);
3535 }
3536
3537 return label;
3538}
3539
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Inter-block insns
   are detached from the main chain and parked in the per-block
   BB_HEADER/BB_FOOTER chains (and the global function header/footer)
   for cfglayout mode.  */

static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  /* Skip any leading notes to find the first basic block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything before the first block becomes the function header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      /* Insns between the previous block's effective end and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
						PREV_INSN (BB_HEAD (bb)));
      /* Insns trailing the block (barriers, jump tables) become its
	 footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block becomes the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3583\f
27a4cd48
DM
/* Pass boilerplate for the "into_cfglayout" RTL pass: switches the
   function into cfglayout mode.  */

namespace {

const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      /* Enter cfglayout mode with no extra flags.  */
      cfg_layout_initialize (0);
      return 0;
    }

}; // class pass_into_cfg_layout_mode

} // anon namespace

/* Factory function used by the pass manager.  */

rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3622
/* Pass boilerplate for the "outof_cfglayout" RTL pass: leaves cfglayout
   mode, re-linearizing the insn chain.  */

namespace {

const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  /* Record the desired linear order in bb->aux before finalizing;
     cfg_layout_finalize consumes it.  */
  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace

/* Factory function used by the pass manager.  */

rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
78bde837
SB
3671\f
3672
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i\n",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation\n");
	  else
	    fprintf (dump_file, "bb %i\n", bb->index);
	}
    }

  /* Now reorder the blocks.  Walk the aux-chain, rewriting the
     prev_bb/next_bb doubly-linked list to match it.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     are going out of cfglayout mode, don't re-allocate the tables.  */
  if (original_copy_tables_initialized_p ())
    free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3746\f
3747
/* Given a reorder chain (linked through each block's ->aux field, starting
   at the entry block's successor), rearrange the code to match.

   This runs in three passes:
     1. Bulk reordering: relink the insn chain to follow the bb->aux order,
	splicing in each block's header and footer insn sequences.
     2. Jump/label fixup: for each block whose old fallthru successor is no
	longer physically next, invert conditional jumps or insert new
	unconditional jumps (via force_nonfallthru_and_redirect) so control
	flow is preserved in the new layout.
     3. Locus fixup (when not optimizing): make sure every edge goto_locus
	is represented by some insn with that location, splitting edges and
	emitting nops where necessary so the debugger can stop there.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx_insn *insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
       bb->aux)
    {
      /* Splice in the block's header sequence (if any) before its head.  */
      if (BB_HEADER (bb))
	{
	  if (insn)
	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	SET_NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      SET_PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      /* Splice in the block's footer sequence (if any) after its end.  */
      if (BB_FOOTER (bb))
	{
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  SET_NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    SET_PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
  if (flag_checking)
    verify_insn_chain ();

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
       bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx_insn *bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_jump);
	  if (any_condjump_p (bb_end_jump))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_jump)
			      || returnjump_p (bb_end_jump)
			      || (e_taken->flags & EDGE_CROSSING));
		  emit_barrier_after (bb_end_jump);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);

		  /* Invert the jump when the branch is predicted to be
		     taken less often than not, so the likelier path
		     becomes the fallthru.  */
		  if (note
		      && profile_probability::from_reg_br_prob_note
				 (XINT (note, 0)) < profile_probability::even ()
		      && invert_jump (bb_end_jump,
				      (e_fall->dest
				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_jump,
				    (e_fall->dest
				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  /* The old jump target label may have become dead.  */
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (as_a<rtx_insn *> (ret_label));
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  /* Link the new forwarder block into the reorder chain right
	     after BB, and skip over it.  */
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus in RTL
     when not optimizing.  */
  if (!optimize && !DECL_IGNORED_P (current_function_decl))
    FOR_EACH_BB_FN (bb, cfun)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx_insn *end;

	      /* Search backward in the source block for a nondebug insn
		 that already carries a location.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      /* A location-less simple jump can simply adopt the locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Check whether the first nondebug insn of the
		     destination already has the right location.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Split the edge and give the new block an insn (a nop if
		 necessary) carrying the goto_locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
4044\f
4045/* Perform sanity checks on the insn chain.
4046 1. Check that next/prev pointers are consistent in both the forward and
4047 reverse direction.
4048 2. Count insns in chain, going both directions, and check if equal.
4049 3. Check that get_last_insn () returns the actual end of chain. */
4050
4051DEBUG_FUNCTION void
4052verify_insn_chain (void)
4053{
8879d71c 4054 rtx_insn *x, *prevx, *nextx;
78bde837
SB
4055 int insn_cnt1, insn_cnt2;
4056
4057 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4058 x != 0;
4059 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4060 gcc_assert (PREV_INSN (x) == prevx);
4061
4062 gcc_assert (prevx == get_last_insn ());
4063
4064 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4065 x != 0;
4066 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4067 gcc_assert (NEXT_INSN (x) == nextx);
4068
4069 gcc_assert (insn_cnt1 == insn_cnt2);
4070}
4071\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* BB->aux != NULL means BB is not already the last block in the
     reorder chain, so it has to be moved to the end.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from the aux chain: find its predecessor in the
	 chain and make it skip over BB.  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* Advance to the current last block of the chain.  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* Append BB as the new last block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4116
4117/* In case there are more than one fallthru predecessors of exit, force that
4118 there is only one. */
4119
4120static void
4121force_one_exit_fallthru (void)
4122{
4123 edge e, predecessor = NULL;
4124 bool more = false;
4125 edge_iterator ei;
4126 basic_block forwarder, bb;
4127
fefa31b5 4128 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
78bde837
SB
4129 if (e->flags & EDGE_FALLTHRU)
4130 {
4131 if (predecessor == NULL)
4132 predecessor = e;
4133 else
4134 {
4135 more = true;
4136 break;
4137 }
4138 }
4139
4140 if (!more)
4141 return;
4142
4143 /* Exit has several fallthru predecessors. Create a forwarder block for
4144 them. */
4145 forwarder = split_edge (predecessor);
fefa31b5
DM
4146 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4147 (e = ei_safe_edge (ei)); )
78bde837
SB
4148 {
4149 if (e->src == forwarder
4150 || !(e->flags & EDGE_FALLTHRU))
4151 ei_next (&ei);
4152 else
4153 redirect_edge_and_branch_force (e, forwarder);
4154 }
4155
4156 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4157 exit block. */
11cd3bed 4158 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4159 {
4160 if (bb->aux == NULL && bb != forwarder)
4161 {
4162 bb->aux = forwarder;
4163 break;
4164 }
4165 }
4166}
4167\f
4168/* Return true in case it is possible to duplicate the basic block BB. */
4169
4170static bool
4171cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4172{
4173 /* Do not attempt to duplicate tablejumps, as we need to unshare
4174 the dispatch table. This is difficult to do, as the instructions
4175 computing jump destination may be hoisted outside the basic block. */
4176 if (tablejump_p (BB_END (bb), NULL, NULL))
4177 return false;
4178
4179 /* Do not duplicate blocks containing insns that can't be copied. */
4180 if (targetm.cannot_copy_insn_p)
4181 {
8879d71c 4182 rtx_insn *insn = BB_HEAD (bb);
78bde837
SB
4183 while (1)
4184 {
4185 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4186 return false;
4187 if (insn == BB_END (bb))
4188 break;
4189 insn = NEXT_INSN (insn);
4190 }
4191 }
4192
4193 return true;
4194}
4195
/* Duplicate the insns from FROM to TO (both inclusive) at the end of the
   current insn chain and return the first insn of the copy.  The copies
   are emitted in place; the caller is expected to unlink and reposition
   them (see e.g. cfg_layout_duplicate_bb).  */

rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (DEBUG_BIND_INSN_P (insn)
	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Preserve (simple) return jump labels, which are shared
	     rtxes rather than real labels.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  /* Labels are deliberately not copied; jumps into the copy
	     must be rewired by the caller.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	      emit_note_copy (as_a <rtx_note *> (insn));
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The first real copied insn follows the placeholder note; drop the
     note and return the start of the copy.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
4286
/* Create a duplicate of the basic block BB.  The copy is created at the
   end of the insn chain, just before the exit block, and inherits BB's
   partition as well as copies of its header and footer sequences.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
{
  rtx_insn *insn;
  basic_block new_bb;

  /* Copy the body insns; they are appended at the end of the chain.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  /* Duplicate the header sequence, then unlink the copy from the main
     chain and attach it as the new block's header.  */
  if (BB_HEADER (bb))
    {
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Likewise for the footer sequence.  */
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4323
4324\f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups.  As an
     example, if edge forwarding performed when optimizing the cfg
     layout required moving a block from the hot to the cold
     section.  This would create an illegal partitioning unless some
     manual fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);

  initialize_original_copy_tables ();

  /* Install the cfglayout variants of the CFG manipulation hooks.  */
  cfg_layout_rtl_register_cfg_hooks ();

  /* Record header/footer boundaries before blocks get rearranged.  */
  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4359
4360/* Splits superblocks. */
4361void
4362break_superblocks (void)
4363{
78bde837
SB
4364 bool need = false;
4365 basic_block bb;
4366
7ba9e72d 4367 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
f61e445a 4368 bitmap_clear (superblocks);
78bde837 4369
11cd3bed 4370 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4371 if (bb->flags & BB_SUPERBLOCK)
4372 {
4373 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4374 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4375 need = true;
4376 }
4377
4378 if (need)
4379 {
4380 rebuild_jump_labels (get_insns ());
4381 find_many_sub_basic_blocks (superblocks);
4382 }
78bde837
SB
4383}
4384
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  /* Dominators are invalidated by the reordering below.  */
  free_dominance_info (CDI_DOMINATORS);
  force_one_exit_fallthru ();
  /* Switch back from the cfglayout hooks to the plain RTL hooks before
     materializing the new insn order.  */
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
4405
ba5e9aca 4406
9ee634e3 4407/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4408
4409static basic_block
d329e058 4410cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4411{
ae50c0cb 4412 rtx insn = (rtx) insnp;
f470c378 4413 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4414
d8ce2eae
DM
4415 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4416 BB_FOOTER (bb) = NULL;
9ee634e3 4417
f470c378 4418 return new_bb;
9ee634e3
JH
4419}
4420
9ee634e3 4421/* Redirect Edge to DEST. */
6de9cd9a 4422static edge
d329e058 4423cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
9ee634e3
JH
4424{
4425 basic_block src = e->src;
6de9cd9a 4426 edge ret;
9ee634e3 4427
bc35512f 4428 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 4429 return NULL;
bc35512f 4430
3348b696 4431 if (e->dest == dest)
6de9cd9a 4432 return e;
bc35512f 4433
687d5dfe
JH
4434 if (e->flags & EDGE_CROSSING
4435 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4436 && simplejump_p (BB_END (src)))
4437 {
4438 if (dump_file)
4439 fprintf (dump_file,
4440 "Removing crossing jump while redirecting edge form %i to %i\n",
4441 e->src->index, dest->index);
4442 delete_insn (BB_END (src));
91c50487 4443 remove_barriers_from_footer (src);
687d5dfe
JH
4444 e->flags |= EDGE_FALLTHRU;
4445 }
4446
fefa31b5 4447 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
6de9cd9a 4448 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
f345f21a 4449 {
6fb5fa3c 4450 df_set_bb_dirty (src);
6de9cd9a 4451 return ret;
f345f21a 4452 }
bc35512f 4453
fefa31b5 4454 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
4455 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4456 {
c263766c
RH
4457 if (dump_file)
4458 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
bc35512f
JH
4459 e->src->index, dest->index);
4460
6fb5fa3c 4461 df_set_bb_dirty (e->src);
bc35512f 4462 redirect_edge_succ (e, dest);
6de9cd9a 4463 return e;
bc35512f
JH
4464 }
4465
9ee634e3
JH
4466 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4467 in the case the basic block appears to be in sequence. Avoid this
4468 transformation. */
4469
9ee634e3
JH
4470 if (e->flags & EDGE_FALLTHRU)
4471 {
4472 /* Redirect any branch edges unified with the fallthru one. */
4b4bf941 4473 if (JUMP_P (BB_END (src))
432f982f
JH
4474 && label_is_jump_target_p (BB_HEAD (e->dest),
4475 BB_END (src)))
9ee634e3 4476 {
341c100f 4477 edge redirected;
c22cacf3 4478
c263766c
RH
4479 if (dump_file)
4480 fprintf (dump_file, "Fallthru edge unified with branch "
432f982f
JH
4481 "%i->%i redirected to %i\n",
4482 e->src->index, e->dest->index, dest->index);
4483 e->flags &= ~EDGE_FALLTHRU;
341c100f
NS
4484 redirected = redirect_branch_edge (e, dest);
4485 gcc_assert (redirected);
0c617be4
JL
4486 redirected->flags |= EDGE_FALLTHRU;
4487 df_set_bb_dirty (redirected->src);
4488 return redirected;
9ee634e3
JH
4489 }
4490 /* In case we are redirecting fallthru edge to the branch edge
c22cacf3 4491 of conditional jump, remove it. */
628f6a4e 4492 if (EDGE_COUNT (src->succs) == 2)
9ee634e3 4493 {
03101c6f
KH
4494 /* Find the edge that is different from E. */
4495 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
628f6a4e 4496
9ee634e3 4497 if (s->dest == dest
a813c111
SB
4498 && any_condjump_p (BB_END (src))
4499 && onlyjump_p (BB_END (src)))
4500 delete_insn (BB_END (src));
9ee634e3 4501 }
c263766c 4502 if (dump_file)
dc764d10 4503 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
bc35512f 4504 e->src->index, e->dest->index, dest->index);
0c617be4 4505 ret = redirect_edge_succ_nodup (e, dest);
9ee634e3
JH
4506 }
4507 else
bc35512f 4508 ret = redirect_branch_edge (e, dest);
9ee634e3 4509
a09ad347
JJ
4510 if (!ret)
4511 return NULL;
4512
687d5dfe 4513 fixup_partition_crossing (ret);
9ee634e3 4514 /* We don't want simplejumps in the insn stream during cfglayout. */
687d5dfe 4515 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
9ee634e3 4516
6fb5fa3c 4517 df_set_bb_dirty (src);
9ee634e3
JH
4518 return ret;
4519}
4520
4521/* Simple wrapper as we always can redirect fallthru edges. */
4522static basic_block
d329e058 4523cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4524{
341c100f
NS
4525 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4526
4527 gcc_assert (redirected);
9ee634e3
JH
4528 return NULL;
4529}
4530
/* Same as delete_basic_block but update cfg_layout structures.
   Before deleting, BB's header and footer sequences are spliced back into
   the main insn chain; afterwards, any insns left between the deleted
   block's neighbors are reattached as the next block's header (or as the
   function footer when BB was the last block).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  /* Splice the header sequence into the chain just before BB_HEAD.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* Drop barriers from the footer, stopping at the first label
	 (anything from a label on belongs to the next block).  */
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      /* Splice what remains of the footer after BB_END.  */
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Decide where leftover insns will be reattached after deletion.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If anything survived between PREV and NEXT (e.g. deleted-label
     notes), unlink it and prepend it to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4614
beb235f8 4615/* Return true when blocks A and B can be safely merged. */
b48d0358 4616
bc35512f 4617static bool
b48d0358 4618cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4619{
750054a2
CT
4620 /* If we are partitioning hot/cold basic blocks, we don't want to
4621 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4622 and cold sections.
4623
8e8d5162 4624 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4625 be optimizable (or blocks that appear to be mergeable), but which really
4626 must be left untouched (they are required to make it safely across
4627 partition boundaries). See the comments at the top of
8e8d5162
CT
4628 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4629
87c8b4be 4630 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4631 return false;
750054a2 4632
7d776ee2
RG
4633 /* Protect the loop latches. */
4634 if (current_loops && b->loop_father->latch == b)
4635 return false;
4636
894a84b5
BS
4637 /* If we would end up moving B's instructions, make sure it doesn't fall
4638 through into the exit block, since we cannot recover from a fallthrough
4639 edge into the exit block occurring in the middle of a function. */
4640 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4641 {
4642 edge e = find_fallthru_edge (b->succs);
fefa31b5 4643 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4644 return false;
4645 }
4646
bc35512f 4647 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4648 return (single_succ_p (a)
4649 && single_succ (a) == b
4650 && single_pred_p (b) == 1
628f6a4e 4651 && a != b
bc35512f 4652 /* Must be simple edge. */
c5cbcccf 4653 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4654 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4655 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4656 /* If the jump insn has side effects, we can't kill the edge.
4657 When not optimizing, try_redirect_by_replacing_jump will
4658 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4659 && (!JUMP_P (BB_END (a))
49ea3702 4660 || ((!optimize || reload_completed)
a813c111 4661 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4662}
4663
/* Merge block A and B.  The blocks must be mergeable
   (see cfg_layout_can_merge_blocks_p).  B's insns, header and footer are
   absorbed into A, and B is deleted.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* Forward the locus of A's outgoing edge onto the (forwarder) edge
     that survives the merge, when it had none of its own.  */
  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4771
4772/* Split edge E. */
f470c378 4773
bc35512f
JH
4774static basic_block
4775cfg_layout_split_edge (edge e)
4776{
bc35512f 4777 basic_block new_bb =
fefa31b5 4778 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4779 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4780 NULL_RTX, e->src);
4781
fefa31b5 4782 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4783 BB_COPY_PARTITION (new_bb, e->src);
4784 else
4785 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4786 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4787 redirect_edge_and_branch_force (e, new_bb);
4788
4789 return new_bb;
4790}
4791
f470c378
ZD
4792/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4793
4794static void
4795rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4796{
4797}
4798
df92c640
SB
4799/* Return true if BB contains only labels or non-executable
4800 instructions. */
4801
4802static bool
4803rtl_block_empty_p (basic_block bb)
4804{
8879d71c 4805 rtx_insn *insn;
df92c640 4806
fefa31b5
DM
4807 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4808 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4809 return true;
4810
4811 FOR_BB_INSNS (bb, insn)
4812 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4813 return false;
4814
4815 return true;
4816}
4817
4818/* Split a basic block if it ends with a conditional branch and if
4819 the other part of the block is not empty. */
4820
4821static basic_block
4822rtl_split_block_before_cond_jump (basic_block bb)
4823{
8879d71c
DM
4824 rtx_insn *insn;
4825 rtx_insn *split_point = NULL;
4826 rtx_insn *last = NULL;
df92c640
SB
4827 bool found_code = false;
4828
4829 FOR_BB_INSNS (bb, insn)
4830 {
4831 if (any_condjump_p (insn))
4832 split_point = last;
4833 else if (NONDEBUG_INSN_P (insn))
4834 found_code = true;
4835 last = insn;
4836 }
4837
4838 /* Did not find everything. */
4839 if (found_code && split_point)
4840 return split_block (bb, split_point)->dest;
4841 else
4842 return NULL;
4843}
4844
6de9cd9a
DN
4845/* Return 1 if BB ends with a call, possibly followed by some
4846 instructions that must stay with the call, 0 otherwise. */
4847
4848static bool
b48d0358 4849rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a 4850{
8879d71c 4851 rtx_insn *insn = BB_END (bb);
6de9cd9a 4852
4b4bf941 4853 while (!CALL_P (insn)
6de9cd9a 4854 && insn != BB_HEAD (bb)
92ddef69 4855 && (keep_with_call_p (insn)
b5b8b0ac
AO
4856 || NOTE_P (insn)
4857 || DEBUG_INSN_P (insn)))
6de9cd9a 4858 insn = PREV_INSN (insn);
4b4bf941 4859 return (CALL_P (insn));
6de9cd9a
DN
4860}
4861
4862/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4863
4864static bool
9678086d 4865rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4866{
4867 return any_condjump_p (BB_END (bb));
4868}
4869
4870/* Return true if we need to add fake edge to exit.
4871 Helper function for rtl_flow_call_edges_add. */
4872
4873static bool
8879d71c 4874need_fake_edge_p (const rtx_insn *insn)
6de9cd9a
DN
4875{
4876 if (!INSN_P (insn))
4877 return false;
4878
4b4bf941 4879 if ((CALL_P (insn)
6de9cd9a
DN
4880 && !SIBLING_CALL_P (insn)
4881 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4882 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4883 return true;
4884
4885 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4886 && MEM_VOLATILE_P (PATTERN (insn)))
4887 || (GET_CODE (PATTERN (insn)) == PARALLEL
4888 && asm_noperands (insn) != -1
4889 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4890 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4891}
4892
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for a CFG that contains only the fixed blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a USE of const0 on the edge to EXIT so the block no
	     longer ends in the problematic insn.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Blocks may have been deleted; the index space can have holes.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Scan the block backwards; PREV_INSN is sampled before any split
	 so the walk is not disturbed by CFG changes below.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
5018
1cb7dfc3 5019/* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
315682fb 5020 the conditional branch target, SECOND_HEAD should be the fall-thru
1cb7dfc3
MH
5021 there is no need to handle this here the loop versioning code handles
5022 this. the reason for SECON_HEAD is that it is needed for condition
5023 in trees, and this should be of the same type since it is a hook. */
5024static void
5025rtl_lv_add_condition_to_bb (basic_block first_head ,
c22cacf3
MS
5026 basic_block second_head ATTRIBUTE_UNUSED,
5027 basic_block cond_bb, void *comp_rtx)
1cb7dfc3 5028{
1476d1bd 5029 rtx_code_label *label;
8879d71c 5030 rtx_insn *seq, *jump;
1cb7dfc3
MH
5031 rtx op0 = XEXP ((rtx)comp_rtx, 0);
5032 rtx op1 = XEXP ((rtx)comp_rtx, 1);
5033 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
ef4bddc2 5034 machine_mode mode;
1cb7dfc3
MH
5035
5036
5037 label = block_label (first_head);
5038 mode = GET_MODE (op0);
5039 if (mode == VOIDmode)
5040 mode = GET_MODE (op1);
5041
5042 start_sequence ();
5043 op0 = force_operand (op0, NULL_RTX);
5044 op1 = force_operand (op1, NULL_RTX);
357067f2
JH
5045 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5046 profile_probability::uninitialized ());
1cb7dfc3
MH
5047 jump = get_last_insn ();
5048 JUMP_LABEL (jump) = label;
5049 LABEL_NUSES (label)++;
5050 seq = get_insns ();
5051 end_sequence ();
5052
2b9c63a2 5053 /* Add the new cond, in the new head. */
c3284718 5054 emit_insn_after (seq, BB_END (cond_bb));
1cb7dfc3
MH
5055}
5056
5057
5058/* Given a block B with unconditional branch at its end, get the
5059 store the return the branch edge and the fall-thru edge in
5060 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
5061static void
5062rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5063 edge *fallthru_edge)
5064{
5065 edge e = EDGE_SUCC (b, 0);
5066
5067 if (e->flags & EDGE_FALLTHRU)
5068 {
5069 *fallthru_edge = e;
5070 *branch_edge = EDGE_SUCC (b, 1);
5071 }
5072 else
5073 {
5074 *branch_edge = e;
5075 *fallthru_edge = EDGE_SUCC (b, 1);
5076 }
5077}
5078
5e2d947c
JH
5079void
5080init_rtl_bb_info (basic_block bb)
5081{
bcc708fc
MM
5082 gcc_assert (!bb->il.x.rtl);
5083 bb->il.x.head_ = NULL;
766090c2 5084 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5e2d947c
JH
5085}
5086
14fa2cc0
ZD
5087/* Returns true if it is possible to remove edge E by redirecting
5088 it to the destination of the other edge from E->src. */
5089
5090static bool
9678086d 5091rtl_can_remove_branch_p (const_edge e)
14fa2cc0 5092{
9678086d
KG
5093 const_basic_block src = e->src;
5094 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
8879d71c
DM
5095 const rtx_insn *insn = BB_END (src);
5096 rtx set;
14fa2cc0
ZD
5097
5098 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 5099 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
5100 return false;
5101
5102 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5103 return false;
5104
3371a64f 5105 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
5106 return false;
5107
5108 if (!onlyjump_p (insn)
5109 || tablejump_p (insn, NULL, NULL))
5110 return false;
5111
5112 set = single_set (insn);
5113 if (!set || side_effects_p (set))
5114 return false;
5115
5116 return true;
5117}
5118
ffe14686 5119static basic_block
229d576c 5120rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
ffe14686 5121{
229d576c 5122 bb = cfg_layout_duplicate_bb (bb, id);
ffe14686
AM
5123 bb->aux = NULL;
5124 return bb;
5125}
5126
aa4723d7 5127/* Do book-keeping of basic block BB for the profile consistency checker.
160576e1 5128 Store the counting in RECORD. */
aa4723d7 5129static void
160576e1 5130rtl_account_profile_record (basic_block bb, struct profile_record *record)
aa4723d7 5131{
8879d71c 5132 rtx_insn *insn;
aa4723d7
SB
5133 FOR_BB_INSNS (bb, insn)
5134 if (INSN_P (insn))
5135 {
160576e1 5136 record->size += insn_cost (insn, false);
3995f3a2 5137 if (bb->count.initialized_p ())
160576e1 5138 record->time
d09e78e8 5139 += insn_cost (insn, true) * bb->count.to_gcov_type ();
0a6a6ac9 5140 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
160576e1 5141 record->time
e7a74006 5142 += insn_cost (insn, true) * bb->count.to_frequency (cfun);
aa4723d7
SB
5143 }
5144}
5145
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  rtl_duplicate_bb, /* duplicate_block */
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
5183
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  cfg_layout_duplicate_bb, /* duplicate_block */
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
78bde837
SB
5225
5226#include "gt-cfgrtl.h"
0ecf545c
MS
5227
5228#if __GNUC__ >= 10
5229# pragma GCC diagnostic pop
5230#endif