]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
re PR tree-optimization/87917 (ICE in initialize_matrix_A at gcc/tree-data-ref.c...
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
ca6c03ca 1/* Control flow graph manipulation code for GNU compiler.
85ec4feb 2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
ca6c03ca
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca6c03ca
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca6c03ca 19
5f0d2358
RK
20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
ca6c03ca
JH
22
23 Available functionality:
bc35512f 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5f0d2358 25 - CFG-aware instruction chain manipulation
ca6c03ca 26 delete_insn, delete_insn_chain
bc35512f
JH
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
ba5e9aca
EB
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
bc35512f
JH
33
34 Functions not supposed for generic use:
5f0d2358 35 - Infrastructure to determine quickly basic block for insn
ca6c03ca 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5f0d2358 37 - Edge redirection with updating and optimizing of insn chain
bc35512f 38 block_label, tidy_fallthru_edge, force_nonfallthru */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6 42#include "coretypes.h"
c7131fb2 43#include "backend.h"
957060b5 44#include "target.h"
c7131fb2 45#include "rtl.h"
957060b5
AM
46#include "tree.h"
47#include "cfghooks.h"
c7131fb2 48#include "df.h"
957060b5 49#include "insn-config.h"
4d0cdd0c 50#include "memmodel.h"
957060b5 51#include "emit-rtl.h"
60393bbc
AM
52#include "cfgrtl.h"
53#include "cfganal.h"
54#include "cfgbuild.h"
55#include "cfgcleanup.h"
60393bbc 56#include "bb-reorder.h"
0cbd9993 57#include "rtl-error.h"
2a34bece 58#include "insn-attr.h"
36566b39 59#include "dojump.h"
ff25ef99 60#include "expr.h"
1cb7dfc3 61#include "cfgloop.h"
ef330312 62#include "tree-pass.h"
013a8899 63#include "print-rtl.h"
ca6c03ca 64
78bde837
SB
65/* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
1130d5e3 67static GTY(()) rtx_insn *cfg_layout_function_footer;
9152e0aa 68static GTY(()) rtx_insn *cfg_layout_function_header;
78bde837 69
8879d71c 70static rtx_insn *skip_insns_after_block (basic_block);
78bde837 71static void record_effective_endpoints (void);
78bde837
SB
72static void fixup_reorder_chain (void);
73
74void verify_insn_chain (void);
75static void fixup_fallthru_exit_predecessor (void);
8879d71c
DM
76static int can_delete_note_p (const rtx_note *);
77static int can_delete_label_p (const rtx_code_label *);
d329e058 78static basic_block rtl_split_edge (edge);
f470c378 79static bool rtl_move_block_after (basic_block, basic_block);
d329e058 80static int rtl_verify_flow_info (void);
f470c378 81static basic_block cfg_layout_split_block (basic_block, void *);
6de9cd9a 82static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
d329e058
AJ
83static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84static void cfg_layout_delete_block (basic_block);
85static void rtl_delete_block (basic_block);
86static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
6de9cd9a 87static edge rtl_redirect_edge_and_branch (edge, basic_block);
f470c378 88static basic_block rtl_split_block (basic_block, void *);
1a817418 89static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
d329e058 90static int rtl_verify_flow_info_1 (void);
f470c378 91static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
92\f
93/* Return true if NOTE is not one of the ones that must be kept paired,
5f0d2358 94 so that we may simply delete it. */
ca6c03ca
JH
95
96static int
8879d71c 97can_delete_note_p (const rtx_note *note)
ca6c03ca 98{
cd9c1ca8
RH
99 switch (NOTE_KIND (note))
100 {
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
105
106 default:
107 return false;
108 }
ca6c03ca
JH
109}
110
111/* True if a given label can be deleted. */
112
113static int
8879d71c 114can_delete_label_p (const rtx_code_label *label)
ca6c03ca 115{
5f0d2358
RK
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
6f7eba34
TS
119 && !vec_safe_contains<rtx_insn *> (forced_labels,
120 const_cast<rtx_code_label *> (label)));
ca6c03ca
JH
121}
122
/* Delete INSN by patching it out.

   Labels that cannot be removed outright (referenced from variables,
   the constant pool, etc.) are converted in place to
   NOTE_INSN_DELETED_LABEL notes instead of being unlinked.  Label use
   counts referenced by INSN (jump targets, REG_LABEL_* notes, jump
   table entries) are decremented.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  /* Re-type the label in place as a deleted-label note.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.
	     This keeps the basic-block note first in the block.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
218
98ccd1d7
IE
219/* Like delete_insn but also purge dead edges from BB.
220 Return true if any edges are eliminated. */
9ed7b221 221
98ccd1d7 222bool
373fc786 223delete_insn_and_edges (rtx_insn *insn)
3dec4024 224{
3dec4024
JH
225 bool purge = false;
226
ba4f7968 227 if (INSN_P (insn)
3dec4024 228 && BLOCK_FOR_INSN (insn)
a813c111 229 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024 230 purge = true;
03fbe718 231 delete_insn (insn);
3dec4024 232 if (purge)
98ccd1d7
IE
233 return purge_dead_edges (BLOCK_FOR_INSN (insn));
234 return false;
3dec4024
JH
235}
236
ca6c03ca 237/* Unlink a chain of insns between START and FINISH, leaving notes
a7b87f73
ZD
238 that must be paired. If CLEAR_BB is true, we set bb field for
239 insns that cannot be removed to NULL. */
ca6c03ca
JH
240
241void
2d93cd20 242delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
ca6c03ca 243{
5f0d2358
RK
244 /* Unchain the insns one by one. It would be quicker to delete all of these
245 with a single unchaining, rather than one at a time, but we need to keep
246 the NOTE's. */
2d93cd20 247 rtx_insn *current = finish;
ca6c03ca
JH
248 while (1)
249 {
2d93cd20 250 rtx_insn *prev = PREV_INSN (current);
8879d71c 251 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
ca6c03ca
JH
252 ;
253 else
03fbe718 254 delete_insn (current);
ca6c03ca 255
4654c0cf 256 if (clear_bb && !current->deleted ())
03fbe718 257 set_block_for_insn (current, NULL);
a7b87f73 258
03fbe718 259 if (current == start)
ca6c03ca 260 break;
03fbe718 261 current = prev;
ca6c03ca
JH
262 }
263}
264\f
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain: the note is the
	   whole block.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes right after the label heading the block.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the bb note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
345
5f0d2358 346/* Create new basic block consisting of instructions in between HEAD and END
ffe14686
AM
347 and place it to the BB chain after block AFTER. END can be NULL to
348 create a new empty basic block before HEAD. Both END and HEAD can be
349 NULL to create basic block at the end of INSN chain. */
ca6c03ca 350
bc35512f
JH
351static basic_block
352rtl_create_basic_block (void *headp, void *endp, basic_block after)
ca6c03ca 353{
e00022e9
DM
354 rtx_insn *head = (rtx_insn *) headp;
355 rtx_insn *end = (rtx_insn *) endp;
ca6c03ca 356 basic_block bb;
0b17ab2f 357
7eca0767 358 /* Grow the basic block array if needed. */
8b1c6fd7
DM
359 if ((size_t) last_basic_block_for_fn (cfun)
360 >= basic_block_info_for_fn (cfun)->length ())
7eca0767 361 {
8b1c6fd7
DM
362 size_t new_size =
363 (last_basic_block_for_fn (cfun)
364 + (last_basic_block_for_fn (cfun) + 3) / 4);
c5f05f68 365 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
7eca0767 366 }
0b17ab2f 367
0cae8d31 368 n_basic_blocks_for_fn (cfun)++;
ca6c03ca 369
852c6ec7 370 bb = create_basic_block_structure (head, end, NULL, after);
ca6c03ca
JH
371 bb->aux = NULL;
372 return bb;
373}
bc35512f
JH
374
375static basic_block
376cfg_layout_create_basic_block (void *head, void *end, basic_block after)
377{
378 basic_block newbb = rtl_create_basic_block (head, end, after);
379
bc35512f
JH
380 return newbb;
381}
ca6c03ca
JH
382\f
383/* Delete the insns in a (non-live) block. We physically delete every
384 non-deleted-note insn, and update the flow graph appropriately.
385
386 Return nonzero if we deleted an exception handler. */
387
388/* ??? Preserving all such notes strikes me as wrong. It would be nice
389 to post-process the stream to remove empty blocks, loops, ranges, etc. */
390
f0fda11c 391static void
d329e058 392rtl_delete_block (basic_block b)
ca6c03ca 393{
8879d71c 394 rtx_insn *insn, *end;
ca6c03ca
JH
395
396 /* If the head of this block is a CODE_LABEL, then it might be the
f39e46ba
SB
397 label for an exception handler which can't be reached. We need
398 to remove the label from the exception_handler_label list. */
a813c111 399 insn = BB_HEAD (b);
ca6c03ca 400
96370780 401 end = get_last_bb_insn (b);
ca6c03ca
JH
402
403 /* Selectively delete the entire chain. */
1130d5e3 404 BB_HEAD (b) = NULL;
a7b87f73
ZD
405 delete_insn_chain (insn, end, true);
406
6fb5fa3c
DB
407
408 if (dump_file)
409 fprintf (dump_file, "deleting block %d\n", b->index);
410 df_bb_delete (b->index);
ca6c03ca
JH
411}
412\f
852c6ec7 413/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
414
415void
d329e058 416compute_bb_for_insn (void)
ca6c03ca 417{
e0082a72 418 basic_block bb;
ca6c03ca 419
11cd3bed 420 FOR_EACH_BB_FN (bb, cfun)
ca6c03ca 421 {
8879d71c
DM
422 rtx_insn *end = BB_END (bb);
423 rtx_insn *insn;
ca6c03ca 424
a813c111 425 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 426 {
ba4f7968 427 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
428 if (insn == end)
429 break;
ca6c03ca
JH
430 }
431 }
432}
433
434/* Release the basic_block_for_insn array. */
435
c2924966 436unsigned int
d329e058 437free_bb_for_insn (void)
ca6c03ca 438{
8879d71c 439 rtx_insn *insn;
ba4f7968 440 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 441 if (!BARRIER_P (insn))
ba4f7968 442 BLOCK_FOR_INSN (insn) = NULL;
c2924966 443 return 0;
ca6c03ca
JH
444}
445
namespace {

/* Metadata for the *free_cfg pass: an RTL pass with no timevar that
   destroys the PROP_cfg property.  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass that tears down the CFG-to-insn mapping late in compilation.  */
class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  /* Mark the hot/cold section boundary before the BB info goes away.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

/* Factory for the pass manager.  */
rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
498
91278841 499/* Return RTX to emit after when we want to emit code on the entry of function. */
8a2e8325 500rtx_insn *
91278841
AP
501entry_of_function (void)
502{
0cae8d31 503 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
fefa31b5 504 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
91278841
AP
505}
506
11b904a1
BS
507/* Emit INSN at the entry point of the function, ensuring that it is only
508 executed once per function. */
509void
510emit_insn_at_entry (rtx insn)
511{
fefa31b5 512 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
11b904a1 513 edge e = ei_safe_edge (ei);
5419bc7f 514 gcc_assert (e->flags & EDGE_FALLTHRU);
11b904a1
BS
515
516 insert_insn_on_edge (insn, e);
517 commit_edge_insertions ();
518}
519
6c3d0e31 520/* Update BLOCK_FOR_INSN of insns between BEGIN and END
b8698a0f 521 (or BARRIER if found) and notify df of the bb change.
6c3d0e31
SP
522 The insn chain range is inclusive
523 (i.e. both BEGIN and END will be updated. */
ca6c03ca 524
6c3d0e31 525static void
8879d71c 526update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
ca6c03ca 527{
8879d71c 528 rtx_insn *insn;
ca6c03ca 529
6c3d0e31
SP
530 end = NEXT_INSN (end);
531 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
63642d5a
AO
532 if (!BARRIER_P (insn))
533 df_insn_change_bb (insn, bb);
ca6c03ca 534}
6c3d0e31
SP
535
536/* Update BLOCK_FOR_INSN of insns in BB to BB,
537 and notify df of the change. */
538
539void
540update_bb_for_insn (basic_block bb)
541{
542 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
543}
544
532aafad
SB
545\f
546/* Like active_insn_p, except keep the return value clobber around
547 even after reload. */
548
549static bool
8879d71c 550flow_active_insn_p (const rtx_insn *insn)
532aafad
SB
551{
552 if (active_insn_p (insn))
553 return true;
554
555 /* A clobber of the function return value exists for buggy
556 programs that fail to return a value. Its effect is to
557 keep the return value from being live across the entire
558 function. If we allow it to be skipped, we introduce the
559 possibility for register lifetime confusion. */
560 if (GET_CODE (PATTERN (insn)) == CLOBBER
561 && REG_P (XEXP (PATTERN (insn), 0))
562 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
563 return true;
564
565 return false;
566}
567
568/* Return true if the block has no effect and only forwards control flow to
569 its single destination. */
532aafad
SB
570
571bool
2407343c 572contains_no_active_insn_p (const_basic_block bb)
532aafad 573{
8879d71c 574 rtx_insn *insn;
532aafad 575
4833f561
JJ
576 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
577 || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
578 || !single_succ_p (bb)
579 || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
532aafad
SB
580 return false;
581
2407343c
JR
582 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
583 if (INSN_P (insn) && flow_active_insn_p (insn))
584 return false;
585
586 return (!INSN_P (insn)
587 || (JUMP_P (insn) && simplejump_p (insn))
588 || !flow_active_insn_p (insn));
589}
590
591/* Likewise, but protect loop latches, headers and preheaders. */
592/* FIXME: Make this a cfg hook. */
593
594bool
595forwarder_block_p (const_basic_block bb)
596{
597 if (!contains_no_active_insn_p (bb))
598 return false;
599
532aafad
SB
600 /* Protect loop latches, headers and preheaders. */
601 if (current_loops)
602 {
603 basic_block dest;
604 if (bb->loop_father->header == bb)
605 return false;
606 dest = EDGE_SUCC (bb, 0)->dest;
607 if (dest->loop_father->header == dest)
608 return false;
609 }
610
2407343c 611 return true;
532aafad
SB
612}
613
614/* Return nonzero if we can reach target from src by falling through. */
d7b6661b 615/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
532aafad
SB
616
617bool
618can_fallthru (basic_block src, basic_block target)
619{
8879d71c
DM
620 rtx_insn *insn = BB_END (src);
621 rtx_insn *insn2;
532aafad
SB
622 edge e;
623 edge_iterator ei;
624
fefa31b5 625 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
626 return true;
627 if (src->next_bb != target)
d7b6661b
SB
628 return false;
629
630 /* ??? Later we may add code to move jump tables offline. */
631 if (tablejump_p (insn, NULL, NULL))
632 return false;
633
532aafad 634 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 635 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad 636 && e->flags & EDGE_FALLTHRU)
d7b6661b 637 return false;
532aafad
SB
638
639 insn2 = BB_HEAD (target);
d7b6661b 640 if (!active_insn_p (insn2))
532aafad
SB
641 insn2 = next_active_insn (insn2);
642
532aafad
SB
643 return next_active_insn (insn) == insn2;
644}
645
646/* Return nonzero if we could reach target from src by falling through,
647 if the target was made adjacent. If we already have a fall-through
648 edge to the exit block, we can't do that. */
649static bool
650could_fall_through (basic_block src, basic_block target)
651{
652 edge e;
653 edge_iterator ei;
654
fefa31b5 655 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
656 return true;
657 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 658 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad
SB
659 && e->flags & EDGE_FALLTHRU)
660 return 0;
661 return true;
662}
ca6c03ca 663\f
ef2be249 664/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
ed270997 665rtx_note *
ef2be249
RS
666bb_note (basic_block bb)
667{
8879d71c 668 rtx_insn *note;
ef2be249
RS
669
670 note = BB_HEAD (bb);
671 if (LABEL_P (note))
672 note = NEXT_INSN (note);
673
674 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
ed270997 675 return as_a <rtx_note *> (note);
ef2be249
RS
676}
677
6fb5fa3c
DB
678/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
679 note associated with the BLOCK. */
680
8879d71c 681static rtx_insn *
6fb5fa3c
DB
682first_insn_after_basic_block_note (basic_block block)
683{
8879d71c 684 rtx_insn *insn;
6fb5fa3c
DB
685
686 /* Get the first instruction in the block. */
687 insn = BB_HEAD (block);
688
689 if (insn == NULL_RTX)
8879d71c 690 return NULL;
6fb5fa3c
DB
691 if (LABEL_P (insn))
692 insn = NEXT_INSN (insn);
693 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
694
695 return NEXT_INSN (insn);
696}
697
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  If INSNP is NULL, the
   split point defaults to the insn just after BB's bb note (i.e. the
   new block receives the whole body).  Returns the new block.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
759
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  /* An unknown locus is never worth preserving.  */
  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  /* The last located insn of A already carries the locus — not unique.  */
  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      /* The first real insn of B already carries the locus — not unique.  */
      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
796
797/* If the single edge between blocks A and B is the only place in RTL which
798 holds some unique locus, emit a nop with that locus between the blocks. */
799
800static void
801emit_nop_for_unique_locus_between (basic_block a, basic_block b)
802{
803 if (!unique_locus_on_edge_between_p (a, b))
804 return;
805
1130d5e3 806 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5368224f 807 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
9be94227
EB
808}
809
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Peel any trailing debug insns off B; they are handled separately so
     they end up inside A without disturbing the real insn boundary.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* See forward_edge_locus above: push A->B's locus onto B's outgoing
     edge now that B's insns live in A.  */
  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
bc35512f 933
6fb5fa3c 934
bc35512f 935/* Return true when block A and B can be merged. */
9678086d 936
b48d0358
DN
937static bool
938rtl_can_merge_blocks (basic_block a, basic_block b)
bc35512f 939{
750054a2
CT
940 /* If we are partitioning hot/cold basic blocks, we don't want to
941 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
942 and cold sections.
943
8e8d5162 944 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
945 be optimizable (or blocks that appear to be mergeable), but which really
946 must be left untouched (they are required to make it safely across
947 partition boundaries). See the comments at the top of
8e8d5162 948 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
076c7ab8 949
87c8b4be 950 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 951 return false;
750054a2 952
7d776ee2
RG
953 /* Protect the loop latches. */
954 if (current_loops && b->loop_father->latch == b)
955 return false;
956
bc35512f 957 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
958 return (single_succ_p (a)
959 && single_succ (a) == b
960 && single_pred_p (b)
628f6a4e 961 && a != b
bc35512f 962 /* Must be simple edge. */
c5cbcccf 963 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
bc35512f 964 && a->next_bb == b
fefa31b5
DM
965 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
966 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
967 /* If the jump insn has side effects,
968 we can't kill the edge. */
4b4bf941 969 && (!JUMP_P (BB_END (a))
e24e7211 970 || (reload_completed
a813c111 971 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f 972}
ca6c03ca 973\f
5f0d2358
RK
974/* Return the label in the head of basic block BLOCK. Create one if it doesn't
975 exist. */
ca6c03ca 976
1476d1bd 977rtx_code_label *
d329e058 978block_label (basic_block block)
ca6c03ca 979{
fefa31b5 980 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1476d1bd 981 return NULL;
5f0d2358 982
4b4bf941 983 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 984 {
1130d5e3 985 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 986 }
5f0d2358 987
1476d1bd 988 return as_a <rtx_code_label *> (BB_HEAD (block));
ca6c03ca
JH
989}
990
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.

   E is the edge to redirect, TARGET the desired destination, and
   IN_CFGLAYOUT says whether we are operating in cfglayout mode (where
   fallthru edges need not be physically adjacent and barriers live in
   the footer chain).  Returns the surviving edge on success, NULL on
   failure.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* Only a jump with no other side effects may be replaced.  */
  if (!onlyjump_p (insn))
    return NULL;
  /* Tablejumps cannot be rewritten once table layout matters.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* NOTE: this inner INSN deliberately shadows the outer one;
	     it walks the footer chain, not the block body.  */
	  rtx_insn *insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  /* Unlink the barrier from the doubly-linked footer
		     chain, fixing up the head pointer when the barrier
		     is first.  */
		  if (PREV_INSN (insn))
		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      /* Emit the replacement jump first so BB_END (src) becomes the new
	 jump, then delete the old (possibly complex) jump.  */
      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Splice NEW_INSN out of its current position ...  */
	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ... and relink it immediately before BARRIER.  */
	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The single remaining edge is now unconditional.  */
  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1168
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.

   Three classes of jump are handled: tablejumps (including casesi
   dispatch insns), asm gotos, and ordinary (un)conditional jumps.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      /* NOTE(review): block_label is called before the exit-block check
	 below; block_label returns NULL for the exit block, so this is
	 presumably harmless — confirm against block_label's contract.  */
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      /* Rewrite every table entry that points at OLD_LABEL, keeping
	 the label reference counts in sync.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: rewrite its label operands, its JUMP_LABEL, and
	 any REG_LABEL_TARGET / REG_LABEL_OPERAND notes.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* NEW_LABEL is now the primary target, so a duplicate
	     REG_LABEL_TARGET note for it is redundant.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  /* OLD_LABEL stays reachable only through the notes; move the
	     REG_LABEL_TARGET note over to NEW_LABEL unless one already
	     exists or NEW_LABEL is the primary target.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* There may be several REG_LABEL_OPERAND notes for OLD_LABEL;
	 retarget them all.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1282
1283
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure.

   E is the branch edge to retarget and TARGET its new destination.
   Returns the (possibly replaced) edge on success.  */

static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.
       (Note: INSN is reused here as the loop variable.)  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Update the CFG to match the patched insns, merging with any
     pre-existing edge to TARGET.  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1322
3371a64f
TJ
1323/* Called when edge E has been redirected to a new destination,
1324 in order to update the region crossing flag on the edge and
1325 jump. */
1326
1327static void
1328fixup_partition_crossing (edge e)
1329{
fefa31b5
DM
1330 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1331 == EXIT_BLOCK_PTR_FOR_FN (cfun))
3371a64f
TJ
1332 return;
1333 /* If we redirected an existing edge, it may already be marked
1334 crossing, even though the new src is missing a reg crossing note.
1335 But make sure reg crossing note doesn't already exist before
1336 inserting. */
1337 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1338 {
1339 e->flags |= EDGE_CROSSING;
52af3804 1340 if (JUMP_P (BB_END (e->src)))
339ba33b 1341 CROSSING_JUMP_P (BB_END (e->src)) = 1;
3371a64f
TJ
1342 }
1343 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1344 {
1345 e->flags &= ~EDGE_CROSSING;
1346 /* Remove the section crossing note from jump at end of
1347 src if it exists, and if no other successors are
1348 still crossing. */
339ba33b 1349 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
3371a64f
TJ
1350 {
1351 bool has_crossing_succ = false;
1352 edge e2;
1353 edge_iterator ei;
1354 FOR_EACH_EDGE (e2, ei, e->src->succs)
1355 {
1356 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1357 if (has_crossing_succ)
1358 break;
1359 }
1360 if (!has_crossing_succ)
339ba33b 1361 CROSSING_JUMP_P (BB_END (e->src)) = 0;
3371a64f
TJ
1362 }
1363 }
1364}
1365
600b5b1d
TJ
1366/* Called when block BB has been reassigned to the cold partition,
1367 because it is now dominated by another cold block,
1368 to ensure that the region crossing attributes are updated. */
1369
1370static void
1371fixup_new_cold_bb (basic_block bb)
1372{
1373 edge e;
1374 edge_iterator ei;
1375
1376 /* This is called when a hot bb is found to now be dominated
1377 by a cold bb and therefore needs to become cold. Therefore,
1378 its preds will no longer be region crossing. Any non-dominating
1379 preds that were previously hot would also have become cold
1380 in the caller for the same region. Any preds that were previously
1381 region-crossing will be adjusted in fixup_partition_crossing. */
1382 FOR_EACH_EDGE (e, ei, bb->preds)
1383 {
1384 fixup_partition_crossing (e);
1385 }
1386
1387 /* Possibly need to make bb's successor edges region crossing,
1388 or remove stale region crossing. */
1389 FOR_EACH_EDGE (e, ei, bb->succs)
1390 {
1391 /* We can't have fall-through edges across partition boundaries.
1392 Note that force_nonfallthru will do any necessary partition
1393 boundary fixup by calling fixup_partition_crossing itself. */
1394 if ((e->flags & EDGE_FALLTHRU)
1395 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
fefa31b5 1396 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
600b5b1d
TJ
1397 force_nonfallthru (e);
1398 else
1399 fixup_partition_crossing (e);
1400 }
1401}
1402
bc35512f
JH
1403/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1404 expense of adding new instructions or reordering basic blocks.
1405
1406 Function can be also called with edge destination equivalent to the TARGET.
1407 Then it should try the simplifications and do nothing if none is possible.
1408
6de9cd9a
DN
1409 Return edge representing the branch if transformation succeeded. Return NULL
1410 on failure.
1411 We still return NULL in case E already destinated TARGET and we didn't
1412 managed to simplify instruction stream. */
bc35512f 1413
6de9cd9a 1414static edge
5671bf27 1415rtl_redirect_edge_and_branch (edge e, basic_block target)
bc35512f 1416{
6de9cd9a 1417 edge ret;
f345f21a 1418 basic_block src = e->src;
3371a64f 1419 basic_block dest = e->dest;
f345f21a 1420
bc35512f 1421 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 1422 return NULL;
bc35512f 1423
3371a64f 1424 if (dest == target)
6de9cd9a 1425 return e;
3348b696 1426
6de9cd9a 1427 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
f345f21a 1428 {
6fb5fa3c 1429 df_set_bb_dirty (src);
3371a64f 1430 fixup_partition_crossing (ret);
6de9cd9a 1431 return ret;
f345f21a 1432 }
bc35512f 1433
6de9cd9a
DN
1434 ret = redirect_branch_edge (e, target);
1435 if (!ret)
1436 return NULL;
5f0d2358 1437
6fb5fa3c 1438 df_set_bb_dirty (src);
3371a64f 1439 fixup_partition_crossing (ret);
6de9cd9a 1440 return ret;
ca6c03ca
JH
1441}
1442
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
	      || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      /* In cfglayout mode barriers do not live in the main insn
	 stream; detach the just-emitted barrier and append it to the
	 block's footer chain instead.  */
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  /* Walk to the end of the existing footer.  */
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  /* Only append if the footer does not already end in a
	     barrier; a second trailing barrier would be redundant.  */
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
1471
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.

   Returns the newly created jump block, or NULL when no new block was
   needed.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = profile_probability::from_reg_br_prob_note (prob);
	  /* NOTE(review): this zeroes E's probability regardless of the
	     note value — presumably intentional (the branch now takes
	     the whole weight), but worth confirming.  */
	  e->probability -= e->probability;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      /* Keep only the fallthru bit on E itself.  */
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

	  /* Make sure new block ends up in correct hot/cold section.  */
	  BB_COPY_PARTITION (bb, e->dest);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget any asm-goto label operand that pointed at the old
	     fallthru destination, keeping use counts in sync.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  /* Mirror the label change in JUMP_LABEL and the
	     REG_LABEL_TARGET / REG_LABEL_OPERAND notes.  */
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the weight evenly between the fallthru edge and the
	     direct asm-goto edge.  */
	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
	  jump_block->count = jump_block->count.apply_scale (1, 2);
	  edge new_edge2 = make_edge (new_edge->src, target,
				      e->flags & ~EDGE_FALLTHRU);
	  new_edge2->probability = probability - new_edge->probability;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Redirection to the exit block becomes a (simple) return insn,
	 chosen by JUMP_LABEL as documented above.  */
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1707
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  /* No redirection requested: keep the destination, pass no return
     label.  */
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
1717
1718/* Redirect edge even at the expense of creating new jump insn or
1719 basic block. Return new basic block if created, NULL otherwise.
41806d92 1720 Conversion must be possible. */
ca6c03ca 1721
9ee634e3 1722static basic_block
d329e058 1723rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1724{
5f0d2358
RK
1725 if (redirect_edge_and_branch (e, target)
1726 || e->dest == target)
ca6c03ca
JH
1727 return NULL;
1728
1729 /* In case the edge redirection failed, try to force it to be non-fallthru
1730 and redirect newly created simplejump. */
6fb5fa3c 1731 df_set_bb_dirty (e->src);
26898771 1732 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1733}
1734
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

      /* Step back once more so Q is the last insn to KEEP; everything
	 after it up to C's head is deleted below.  */
      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
ca6c03ca 1802\f
f470c378
ZD
/* cfghooks "move_block_after" hook for plain RTL mode: should move basic
   block BB after basic block AFTER.  NIY (not implemented yet) -- always
   returns false so callers fall back to another strategy.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
1811
3371a64f
TJ
/* Locate the last bb in the same partition as START_BB.  Walks the bb
   chain forward from START_BB; the last block of the partition is the
   one whose successor in the chain belongs to a different partition.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* If the loop ran off the end, BB is now the exit block, so the last
     block of the partition is the one just before it.
     Return bb before the exit block.  */
  return bb->prev_bb;
}
1826
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   The new block is created on EDGE_IN's destination side and receives a
   single fallthru edge to the old destination; EDGE_IN itself is then
   redirected (or re-targeted) to the new block.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  The new block is inserted before this
     insn; for the exit block there is no head insn, so NULL means "at
     the very end".  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* Edges out of the entry block: place the new bb right before
	     the destination and give it the destination's partition.  */
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = get_last_bb_insn (after);
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru must not have needed to create a new block
	 here, since bb's only successor edge is the one just made.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && (extract_asm_operands (PATTERN (last))
		  || JUMP_LABEL (last) == before)
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1942
/* Queue instructions for insertion on an edge between two basic blocks.
   PATTERN is emitted onto the pending sequence stored in E->insns.r;
   the new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  /* Start a fresh sequence, or reopen the one already pending on this
     edge so successive calls append in order.  */
  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}
1964
/* Update the CFG for the instructions queued on edge E: pick an
   insertion point (the destination head, the source end, or a newly
   split block) and emit the pending sequence there.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  Exactly one of
     BEFORE/AFTER is non-NULL at this point.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      /* The old jump we inserted before is now dead.  */
      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
2073
/* Update the CFG for all queued instructions: walk every block from
   entry to exit and commit any sequence pending on a successor edge.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks. This will cause the verification below to fail,
     and lead to now cold code in the hot section. In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  checking_verify_flow_info ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}
2102\f
6fb5fa3c 2103
/* Print out RTL-specific basic block information (live information
   at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
   documented in dumpfile.h.  */

static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
{
  char *s_indent;

  /* NOTE(review): S_INDENT is built here but never referenced below in
     this function -- looks like leftover from an older dump format.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  /* The artificial entry/exit blocks have no insns to print.  */
  if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
    {
      rtx_insn *last = BB_END (bb);
      if (last)
	last = NEXT_INSN (last);
      for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	{
	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (insn, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, insn);
	  else
	    dump_insn_slim (outf, insn);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (insn, outf);
	}
    }

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }

}
2148\f
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  Walks the whole insn chain starting at RTX_FIRST and
   annotates each insn with which basic block(s) it belongs to.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: the bb that starts / ends at that insn, and
	 how many bbs claim the insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  /* First pass: record bb boundaries and membership per insn.  */
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: print each insn, bracketed by bb header/footer
	 info where a bb starts or ends.  */
      for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2246\f
/* Update the branch probability of BB if a REG_BR_PROB is present.
   If the cfg probability is uninitialized (or BB no longer ends in a
   jump), the stale note is unlinked instead; otherwise the note is
   rewritten to match the branch edge's probability.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
    {
      if (note)
	{
	  /* Manually unlink the note from BB_END's REG_NOTES list.  */
	  rtx *note_link, this_rtx;

	  note_link = &REG_NOTES (BB_END (bb));
	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
	    if (this_rtx == note)
	      {
		*note_link = XEXP (this_rtx, 1);
		break;
	      }
	}
      return;
    }
  /* Nothing to do if there is no note or it already agrees.  */
  if (!note
      || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
    return;
  XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
}
96370780
MK
2275
/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).  */
rtx_insn *
get_last_bb_insn (basic_block bb)
{
  rtx_jump_table_data *table;
  rtx_insn *tmp;
  rtx_insn *end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &table))
    end = table;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_nondebug_insn_bb (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_nondebug_insn_bb (end);
    }

  return end;
}
af205f67 2299
d9af4fea
JH
/* Add all BBs reachable from entry via hot paths into the SET.
   Breadth-ish worklist walk over successor edges, never stepping into
   a block already placed in the cold partition.  */

void
find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
{
  auto_vec<basic_block, 64> worklist;

  set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  while (worklist.length () > 0)
    {
      basic_block bb = worklist.pop ();
      edge_iterator ei;
      edge e;

      /* hash_set::add returns true if the element was already present,
	 so each block is pushed at most once.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
	    && !set->add (e->dest))
	  worklist.safe_push (e->dest);
    }
}
2322
600b5b1d
TJ
2323/* Sanity check partition hotness to ensure that basic blocks in
2324   the cold partition don't dominate basic blocks in the hot partition.
2325 If FLAG_ONLY is true, report violations as errors. Otherwise
2326 re-mark the dominated blocks as cold, since this is run after
2327 cfg optimizations that may make hot blocks previously reached
2328 by both hot and cold blocks now only reachable along cold paths. */
2329
2330static vec<basic_block>
2331find_partition_fixes (bool flag_only)
2332{
2333 basic_block bb;
2334 vec<basic_block> bbs_in_cold_partition = vNULL;
2335 vec<basic_block> bbs_to_fix = vNULL;
d9af4fea 2336 hash_set<basic_block> set;
600b5b1d
TJ
2337
2338 /* Callers check this. */
2339 gcc_checking_assert (crtl->has_bb_partition);
2340
d9af4fea 2341 find_bbs_reachable_by_hot_paths (&set);
600b5b1d 2342
d9af4fea
JH
2343 FOR_EACH_BB_FN (bb, cfun)
2344 if (!set.contains (bb)
2345 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2346 {
2347 if (flag_only)
2348 error ("non-cold basic block %d reachable only "
2349 "by paths crossing the cold partition", bb->index);
2350 else
2351 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2352 bbs_to_fix.safe_push (bb);
2353 bbs_in_cold_partition.safe_push (bb);
2354 }
600b5b1d
TJ
2355
2356 return bbs_to_fix;
2357}
2358
2359/* Perform cleanup on the hot/cold bb partitioning after optimization
2360 passes that modify the cfg. */
2361
2362void
2363fixup_partitions (void)
2364{
2365 basic_block bb;
2366
2367 if (!crtl->has_bb_partition)
2368 return;
2369
2370 /* Delete any blocks that became unreachable and weren't
2371 already cleaned up, for example during edge forwarding
2372 and convert_jumps_to_returns. This will expose more
2373 opportunities for fixing the partition boundaries here.
2374 Also, the calculation of the dominance graph during verification
2375 will assert if there are unreachable nodes. */
2376 delete_unreachable_blocks ();
2377
2378 /* If there are partitions, do a sanity check on them: A basic block in
2379   a cold partition cannot dominate a basic block in a hot partition.
2380 Fixup any that now violate this requirement, as a result of edge
2381 forwarding and unreachable block deletion.  */
2382 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2383
2384 /* Do the partition fixup after all necessary blocks have been converted to
2385 cold, so that we only update the region crossings the minimum number of
2386 places, which can require forcing edges to be non fallthru. */
2387 while (! bbs_to_fix.is_empty ())
2388 {
2389 bb = bbs_to_fix.pop ();
2390 fixup_new_cold_bb (bb);
2391 }
2392}
2393
af205f67
TJ
/* Verify, in the basic block chain, that there is at most one switch
   between hot/cold partitions. This condition will not be true until
   after reorder_basic_blocks is called.  Returns nonzero on failure.  */

static int
verify_hot_cold_block_grouping (void)
{
  basic_block bb;
  int err = 0;
  bool switched_sections = false;
  int current_partition = BB_UNPARTITIONED;

  /* Even after bb reordering is complete, we go into cfglayout mode
     again (in compgoto). Ensure we don't call this before going back
     into linearized RTL when any layout fixes would have been committed.  */
  if (!crtl->bb_reorder_complete
      || current_ir_type () != IR_RTL_CFGRTL)
    return err;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A partition change in chain order marks a section switch; more
	 than one switch is an error.  */
      if (current_partition != BB_UNPARTITIONED
	  && BB_PARTITION (bb) != current_partition)
	{
	  if (switched_sections)
	    {
	      error ("multiple hot/cold transitions found (bb %i)",
		     bb->index);
	      err = 1;
	    }
	  else
	    switched_sections = true;

	  if (!crtl->has_bb_partition)
	    error ("partition found but function partition flag not set");
	}
      current_partition = BB_PARTITION (bb);
    }

  return err;
}
b446e5a2 2435\f
ca6c03ca 2436
251a41b9
TJ
/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.  Returns nonzero on failure.  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Per-block counters of edge categories, tallied below.  */
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      /* Check that a REG_BR_PROB note on a conditional jump agrees with
	 the probability recorded on the branch edge.  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
	    {
	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
		{
		  error ("verify_flow_info: "
			 "REG_BR_PROB is set but cfg probability is not");
		  err = 1;
		}
	    }
	  else if (XINT (note, 0)
		   != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
		   && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0),
		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* An edge truly crosses partitions only between two real
	     blocks in different partitions.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
	  has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* Edges carrying none of the "advisory" flags must come from a
	     branch instruction.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* A jump marked as region-crossing must actually have a crossing
	 edge; dump the whole function to aid debugging on failure.  */
      if (!has_crossing_edge
	  && JUMP_P (BB_END (bb))
	  && CROSSING_JUMP_P (BB_END (bb)))
	{
	  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
	  error ("Region crossing jump across same section in bb %i",
		 bb->index);
	  err = 1;
	}

      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      /* Incoming edges must be uniformly EH or uniformly non-EH.  */
      int has_eh = -1;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (has_eh == -1)
	    has_eh = (e->flags & EDGE_EH);
	  if ((e->flags & EDGE_EH) == has_eh)
	    continue;
	  error ("EH incoming edge mixed with non-EH incoming edges "
		 "in bb %i", bb->index);
	  err = 1;
	  break;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.
     NOTE(review): the vector returned here is never released -- minor
     leak on the (checking-only) path.  */
  if (crtl->has_bb_partition && !err
      && current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2640
251a41b9
TJ
/* Checks on the instructions within blocks. Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.
   Returns nonzero on failure; a control-flow insn mid-block is fatal.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      /* Block consists of just the label -- the mandatory
		 NOTE_INSN_BASIC_BLOCK cannot be present.  */
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    /* Control flow may only appear as the last insn of a block;
	       anything earlier means the CFG is inconsistent.  */
	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
      }

  /* Clean up.  */
  return err;
}
2704
/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately: insns inside a block point to it, while
   insns in cfglayout headers/footers (barriers aside) have no block.
   Returns nonzero on failure.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header/footer insns live outside the block proper and must not
	 claim membership of any block.  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2756
10e9fecc
JH
2757/* Verify the CFG and RTL consistency common for both underlying RTL and
2758 cfglayout RTL.
5f0d2358 2759
10e9fecc 2760 Currently it does following checks:
251a41b9
TJ
2761
2762 - overlapping of basic blocks
2763 - insns with wrong BLOCK_FOR_INSN pointers
2764 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2765 - tails of basic blocks (ensure that boundary is necessary)
2766 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2767 and NOTE_INSN_BASIC_BLOCK
2768 - verify that no fall_thru edge crosses hot/cold partition boundaries
2769 - verify that there are no pending RTL branch predictions
600b5b1d 2770 - verify that hot blocks are not dominated by cold blocks
251a41b9
TJ
2771
2772 In future it can be extended check a lot of other stuff as well
2773 (reachability of basic blocks, life information, etc. etc.). */
9eab6785 2774
10e9fecc 2775static int
251a41b9
TJ
2776rtl_verify_flow_info_1 (void)
2777{
2778 int err = 0;
2779
2780 err |= rtl_verify_bb_pointers ();
2781
2782 err |= rtl_verify_bb_insns ();
2783
2784 err |= rtl_verify_edges ();
2785
251a41b9
TJ
2786 return err;
2787}
2788
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.

   The walk proceeds backwards from the last insn, visiting blocks in
   reverse order so that each block's [head, end] span is matched against
   a contiguous slice of the insn chain.  Returns nonzero on failure.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  /* Map from INSN_UID to the block claiming that insn; used to detect
     insns appearing in more than one block.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      /* Scan backwards from where the previous (i.e. following, in chain
	 order) block's head left off, down to this block's end insn.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* NOTE(review): if X is NULL here (head/end not found above),
	 PREV_INSN (x) dereferences a null pointer — this path appears to
	 rely on such corruption never surviving to this point; confirm.  */
      last_head = PREV_INSN (x);
    }

  /* Whatever precedes the first block's head must also be outside any
     basic block.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2877
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.

   For a block without a fallthru successor, a BARRIER must follow its
   last insn before the next block starts.  For a genuine fallthru edge
   (neither endpoint being the entry/exit block), the destination must be
   the very next block and only notes/debug insns may sit between them.
   Returns nonzero on failure; some violations are fatal via
   fatal_insn ().  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Running off the chain or into the next block's note
		 means the required barrier is missing.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  /* A fallthru edge must go to the immediately following block.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* Only harmless insns (notes, debug insns) may appear between
	       the source's end and the destination's head.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
2936
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.

   This walks the entire insn chain once, tracking the "current" block via
   NOTE_INSN_BASIC_BLOCK notes.  Violations of block ordering or stray
   insns outside any block are fatal (internal_error / fatal_insn).
   Returns ERR (always 0 here; kept for symmetry with the other
   checkers).  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x, *y;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  /* Block notes must appear in next_bb chain order.  */
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      /* Insns between blocks are restricted to a small set of kinds.  */
      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* An unconditional return must be followed by a barrier (debug
	 insns and notes in between are tolerated).  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! ((y = next_nonnote_nondebug_insn (x))
		&& BARRIER_P (y)))
	    fatal_insn ("return not followed by barrier", x);

      /* Leaving the current block once its end insn is passed.  */
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  /* Every real block must have produced exactly one block note.  */
  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

  return err;
}
251a41b9
TJ
3006
3007/* Verify the CFG and RTL consistency common for both underlying RTL and
3008 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3009
3010 Currently it does following checks:
3011 - all checks of rtl_verify_flow_info_1
3012 - test head/end pointers
3013 - check that blocks are laid out in consecutive order
3014 - check that all insns are in the basic blocks
3015 (except the switch handling code, barriers and notes)
3016 - check that all returns are followed by barriers
600b5b1d
TJ
3017 - check that all fallthru edge points to the adjacent blocks
3018 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
3019
3020static int
3021rtl_verify_flow_info (void)
3022{
3023 int err = 0;
3024
3025 err |= rtl_verify_flow_info_1 ();
3026
3027 err |= rtl_verify_bb_insn_chain ();
3028
3029 err |= rtl_verify_fallthru ();
3030
3031 err |= rtl_verify_bb_layout ();
3032
3371a64f
TJ
3033 err |= verify_hot_cold_block_grouping ();
3034
251a41b9
TJ
3035 return err;
3036}
ca6c03ca 3037\f
eaec9b3d 3038/* Assume that the preceding pass has possibly eliminated jump instructions
ca6c03ca
JH
3039 or converted the unconditional jumps. Eliminate the edges from CFG.
3040 Return true if any edges are eliminated. */
3041
3042bool
d329e058 3043purge_dead_edges (basic_block bb)
ca6c03ca 3044{
628f6a4e 3045 edge e;
8879d71c
DM
3046 rtx_insn *insn = BB_END (bb);
3047 rtx note;
ca6c03ca 3048 bool purged = false;
628f6a4e
BE
3049 bool found;
3050 edge_iterator ei;
ca6c03ca 3051
b5b8b0ac
AO
3052 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3053 do
3054 insn = PREV_INSN (insn);
3055 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3056
70da1d03 3057 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
4b4bf941 3058 if (NONJUMP_INSN_P (insn)
70da1d03
JH
3059 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3060 {
3061 rtx eqnote;
3062
3063 if (! may_trap_p (PATTERN (insn))
3064 || ((eqnote = find_reg_equal_equiv_note (insn))
3065 && ! may_trap_p (XEXP (eqnote, 0))))
3066 remove_note (insn, note);
3067 }
3068
546c093e 3069 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
628f6a4e 3070 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
546c093e 3071 {
1d65f45c
RH
3072 bool remove = false;
3073
e5f9a909
JW
3074 /* There are three types of edges we need to handle correctly here: EH
3075 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3076 latter can appear when nonlocal gotos are used. */
1d65f45c 3077 if (e->flags & EDGE_ABNORMAL_CALL)
546c093e 3078 {
1d65f45c
RH
3079 if (!CALL_P (insn))
3080 remove = true;
3081 else if (can_nonlocal_goto (insn))
3082 ;
3083 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3084 ;
0a35513e
AH
3085 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3086 ;
1d65f45c
RH
3087 else
3088 remove = true;
546c093e 3089 }
1d65f45c
RH
3090 else if (e->flags & EDGE_EH)
3091 remove = !can_throw_internal (insn);
3092
3093 if (remove)
546c093e 3094 {
1d65f45c
RH
3095 remove_edge (e);
3096 df_set_bb_dirty (bb);
3097 purged = true;
546c093e
RH
3098 }
3099 else
1d65f45c 3100 ei_next (&ei);
546c093e 3101 }
5f0d2358 3102
4b4bf941 3103 if (JUMP_P (insn))
ca6c03ca
JH
3104 {
3105 rtx note;
3106 edge b,f;
628f6a4e 3107 edge_iterator ei;
5f0d2358 3108
ca6c03ca
JH
3109 /* We do care only about conditional jumps and simplejumps. */
3110 if (!any_condjump_p (insn)
3111 && !returnjump_p (insn)
3112 && !simplejump_p (insn))
c51d95ec 3113 return purged;
5f0d2358 3114
5a1a3e5e
JH
3115 /* Branch probability/prediction notes are defined only for
3116 condjumps. We've possibly turned condjump into simplejump. */
3117 if (simplejump_p (insn))
3118 {
3119 note = find_reg_note (insn, REG_BR_PROB, NULL);
3120 if (note)
3121 remove_note (insn, note);
3122 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3123 remove_note (insn, note);
3124 }
3125
628f6a4e 3126 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 3127 {
7fcd7218
JH
3128 /* Avoid abnormal flags to leak from computed jumps turned
3129 into simplejumps. */
f87c27b4 3130
0e1638d4 3131 e->flags &= ~EDGE_ABNORMAL;
7fcd7218 3132
5a566bed
MM
3133 /* See if this edge is one we should keep. */
3134 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3135 /* A conditional jump can fall through into the next
3136 block, so we should keep the edge. */
628f6a4e
BE
3137 {
3138 ei_next (&ei);
3139 continue;
3140 }
fefa31b5 3141 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
a813c111 3142 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
5a566bed
MM
3143 /* If the destination block is the target of the jump,
3144 keep the edge. */
628f6a4e
BE
3145 {
3146 ei_next (&ei);
3147 continue;
3148 }
fefa31b5
DM
3149 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3150 && returnjump_p (insn))
5a566bed
MM
3151 /* If the destination block is the exit block, and this
3152 instruction is a return, then keep the edge. */
628f6a4e
BE
3153 {
3154 ei_next (&ei);
3155 continue;
3156 }
5a566bed
MM
3157 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3158 /* Keep the edges that correspond to exceptions thrown by
0b75beaa
EB
3159 this instruction and rematerialize the EDGE_ABNORMAL
3160 flag we just cleared above. */
3161 {
3162 e->flags |= EDGE_ABNORMAL;
628f6a4e 3163 ei_next (&ei);
0b75beaa
EB
3164 continue;
3165 }
5f0d2358 3166
5a566bed 3167 /* We do not need this edge. */
6fb5fa3c 3168 df_set_bb_dirty (bb);
ca6c03ca
JH
3169 purged = true;
3170 remove_edge (e);
3171 }
5f0d2358 3172
628f6a4e 3173 if (EDGE_COUNT (bb->succs) == 0 || !purged)
c51d95ec 3174 return purged;
5f0d2358 3175
c263766c
RH
3176 if (dump_file)
3177 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5f0d2358 3178
ca6c03ca
JH
3179 if (!optimize)
3180 return purged;
3181
3182 /* Redistribute probabilities. */
c5cbcccf 3183 if (single_succ_p (bb))
ca6c03ca 3184 {
357067f2 3185 single_succ_edge (bb)->probability = profile_probability::always ();
f87c27b4 3186 }
ca6c03ca
JH
3187 else
3188 {
3189 note = find_reg_note (insn, REG_BR_PROB, NULL);
3190 if (!note)
3191 return purged;
5f0d2358 3192
ca6c03ca
JH
3193 b = BRANCH_EDGE (bb);
3194 f = FALLTHRU_EDGE (bb);
5fa396ad 3195 b->probability = profile_probability::from_reg_br_prob_note
357067f2 3196 (XINT (note, 0));
5fa396ad 3197 f->probability = b->probability.invert ();
ca6c03ca 3198 }
5f0d2358 3199
ca6c03ca
JH
3200 return purged;
3201 }
4b4bf941 3202 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
1722c2c8
RH
3203 {
3204 /* First, there should not be any EH or ABCALL edges resulting
3205 from non-local gotos and the like. If there were, we shouldn't
3206 have created the sibcall in the first place. Second, there
3207 should of course never have been a fallthru edge. */
c5cbcccf
ZD
3208 gcc_assert (single_succ_p (bb));
3209 gcc_assert (single_succ_edge (bb)->flags
3210 == (EDGE_SIBCALL | EDGE_ABNORMAL));
1722c2c8
RH
3211
3212 return 0;
3213 }
ca6c03ca 3214
ca6c03ca
JH
3215 /* If we don't see a jump insn, we don't know exactly why the block would
3216 have been broken at this point. Look for a simple, non-fallthru edge,
3217 as these are only created by conditional branches. If we find such an
3218 edge we know that there used to be a jump here and can then safely
3219 remove all non-fallthru edges. */
628f6a4e
BE
3220 found = false;
3221 FOR_EACH_EDGE (e, ei, bb->succs)
3222 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3223 {
3224 found = true;
3225 break;
3226 }
5f0d2358 3227
628f6a4e 3228 if (!found)
ca6c03ca 3229 return purged;
5f0d2358 3230
2afa8dce
DB
3231 /* Remove all but the fake and fallthru edges. The fake edge may be
3232 the only successor for this block in the case of noreturn
3233 calls. */
628f6a4e 3234 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 3235 {
2afa8dce 3236 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
c51d95ec 3237 {
6fb5fa3c 3238 df_set_bb_dirty (bb);
c51d95ec
JH
3239 remove_edge (e);
3240 purged = true;
3241 }
628f6a4e
BE
3242 else
3243 ei_next (&ei);
ca6c03ca 3244 }
5f0d2358 3245
c5cbcccf 3246 gcc_assert (single_succ_p (bb));
5f0d2358 3247
357067f2 3248 single_succ_edge (bb)->probability = profile_probability::always ();
ca6c03ca 3249
c263766c
RH
3250 if (dump_file)
3251 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
0b17ab2f 3252 bb->index);
ca6c03ca
JH
3253 return purged;
3254}
3255
5f0d2358
RK
3256/* Search all basic blocks for potentially dead edges and purge them. Return
3257 true if some edge has been eliminated. */
ca6c03ca
JH
3258
3259bool
25cd19de 3260purge_all_dead_edges (void)
ca6c03ca 3261{
e0082a72 3262 int purged = false;
e0082a72 3263 basic_block bb;
473fb060 3264
11cd3bed 3265 FOR_EACH_BB_FN (bb, cfun)
473fb060 3266 {
e0082a72 3267 bool purged_here = purge_dead_edges (bb);
5f0d2358 3268
473fb060 3269 purged |= purged_here;
473fb060 3270 }
5f0d2358 3271
ca6c03ca
JH
3272 return purged;
3273}
9ee634e3 3274
ba5e9aca
EB
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the block's last insn is NOT itself the abnormal
	 call / thrower — i.e. insns were emitted after it.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/thrower, then move each
		 following insn onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
78bde837
SB
3363\f
3364/* Cut the insns from FIRST to LAST out of the insns stream. */
3365
a90c614a 3366rtx_insn *
9152e0aa 3367unlink_insn_chain (rtx_insn *first, rtx_insn *last)
78bde837 3368{
a90c614a
DM
3369 rtx_insn *prevfirst = PREV_INSN (first);
3370 rtx_insn *nextlast = NEXT_INSN (last);
78bde837 3371
0f82e5c9
DM
3372 SET_PREV_INSN (first) = NULL;
3373 SET_NEXT_INSN (last) = NULL;
78bde837 3374 if (prevfirst)
0f82e5c9 3375 SET_NEXT_INSN (prevfirst) = nextlast;
78bde837 3376 if (nextlast)
0f82e5c9 3377 SET_PREV_INSN (nextlast) = prevfirst;
78bde837
SB
3378 else
3379 set_last_insn (prevfirst);
3380 if (!prevfirst)
3381 set_first_insn (nextlast);
9152e0aa 3382 return first;
78bde837
SB
3383}
3384\f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.

   A second pass moves misordered notes (see the contradictory-sequence
   comment below) so they are not swallowed into BB's footer.  */

static rtx_insn *
skip_insns_after_block (basic_block bb)
{
  rtx_insn *insn, *last_insn, *next_head, *prev;

  next_head = NULL;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  /* Walk forward from BB's end; `continue' here resumes this for loop,
     extending the skipped region, while `break' ends the scan.  */
  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers belong to the preceding block; include them.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Other notes are skipped over but not claimed.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by jump table data is kept with BB.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Move the note past the claimed region's last insn.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
3468
3469/* Locate or create a label for a given basic block. */
3470
e67d1102 3471static rtx_insn *
78bde837
SB
3472label_for_bb (basic_block bb)
3473{
e67d1102 3474 rtx_insn *label = BB_HEAD (bb);
78bde837
SB
3475
3476 if (!LABEL_P (label))
3477 {
3478 if (dump_file)
3479 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3480
3481 label = block_label (bb);
3482 }
3483
3484 return label;
3485}
3486
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.

   Detaches the pre-first-block insns into cfg_layout_function_header,
   moves per-block surrounding insns into BB_HEADER/BB_FOOTER, and
   stores any trailing insns in cfg_layout_function_footer.  This is
   the setup step for cfglayout mode.  */

static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  /* Find the first basic-block note; everything before it becomes the
     function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      /* Insns between the previous block's claimed region and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      /* Insns after the block's end that belong to it (barriers, jump
	 tables) become the footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever remains after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3530\f
27a4cd48
DM
namespace {

/* Pass descriptor for the "into_cfglayout" RTL pass, which switches the
   function into cfglayout mode (provides PROP_cfglayout).  */
const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper: entering cfglayout mode is a single call to
   cfg_layout_initialize.  */
class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      cfg_layout_initialize (0);
      return 0;
    }

}; // class pass_into_cfg_layout_mode

} // anon namespace

/* Factory for the into_cfglayout pass, called by the pass manager.  */
rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3569
namespace {

/* Pass descriptor for the "outof_cfglayout" RTL pass, which leaves
   cfglayout mode (destroys PROP_cfglayout).  */
const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper for leaving cfglayout mode.  */
class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

/* Leave cfglayout mode: chain each block to its successor via the aux
   field (the order cfg_layout_finalize will linearize), then finalize.  */
unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace

/* Factory for the outof_cfglayout pass, called by the pass manager.  */
rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
78bde837
SB
3618\f
3619
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	}
    }

  /* Now reorder the blocks.  Walk the aux-linked list and rebuild the
     prev_bb/next_bb doubly-linked chain to match it.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     are going out of cfglayout mode, don't re-allocate the tables.  */
  if (original_copy_tables_initialized_p ())
    free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3693\f
3694
3695/* Given a reorder chain, rearrange the code to match. */
3696
3697static void
3698fixup_reorder_chain (void)
3699{
3700 basic_block bb;
9152e0aa 3701 rtx_insn *insn = NULL;
78bde837
SB
3702
3703 if (cfg_layout_function_header)
3704 {
3705 set_first_insn (cfg_layout_function_header);
3706 insn = cfg_layout_function_header;
3707 while (NEXT_INSN (insn))
3708 insn = NEXT_INSN (insn);
3709 }
3710
3711 /* First do the bulk reordering -- rechain the blocks without regard to
3712 the needed changes to jumps and labels. */
3713
fefa31b5
DM
3714 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3715 bb->aux)
78bde837
SB
3716 {
3717 if (BB_HEADER (bb))
3718 {
3719 if (insn)
0f82e5c9 3720 SET_NEXT_INSN (insn) = BB_HEADER (bb);
78bde837
SB
3721 else
3722 set_first_insn (BB_HEADER (bb));
0f82e5c9 3723 SET_PREV_INSN (BB_HEADER (bb)) = insn;
78bde837
SB
3724 insn = BB_HEADER (bb);
3725 while (NEXT_INSN (insn))
3726 insn = NEXT_INSN (insn);
3727 }
3728 if (insn)
0f82e5c9 3729 SET_NEXT_INSN (insn) = BB_HEAD (bb);
78bde837
SB
3730 else
3731 set_first_insn (BB_HEAD (bb));
0f82e5c9 3732 SET_PREV_INSN (BB_HEAD (bb)) = insn;
78bde837
SB
3733 insn = BB_END (bb);
3734 if (BB_FOOTER (bb))
3735 {
0f82e5c9
DM
3736 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3737 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
78bde837
SB
3738 while (NEXT_INSN (insn))
3739 insn = NEXT_INSN (insn);
3740 }
3741 }
3742
0f82e5c9 3743 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
78bde837 3744 if (cfg_layout_function_footer)
0f82e5c9 3745 SET_PREV_INSN (cfg_layout_function_footer) = insn;
78bde837
SB
3746
3747 while (NEXT_INSN (insn))
3748 insn = NEXT_INSN (insn);
3749
3750 set_last_insn (insn);
b2b29377
MM
3751 if (flag_checking)
3752 verify_insn_chain ();
78bde837
SB
3753
3754 /* Now add jumps and labels as needed to match the blocks new
29f3fd5b 3755 outgoing edges. */
78bde837 3756
29f3fd5b
SB
3757 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3758 bb->aux)
78bde837
SB
3759 {
3760 edge e_fall, e_taken, e;
8879d71c 3761 rtx_insn *bb_end_insn;
78bde837 3762 rtx ret_label = NULL_RTX;
3371a64f 3763 basic_block nb;
78bde837
SB
3764 edge_iterator ei;
3765
3766 if (EDGE_COUNT (bb->succs) == 0)
3767 continue;
3768
3769 /* Find the old fallthru edge, and another non-EH edge for
3770 a taken jump. */
3771 e_taken = e_fall = NULL;
3772
3773 FOR_EACH_EDGE (e, ei, bb->succs)
3774 if (e->flags & EDGE_FALLTHRU)
3775 e_fall = e;
3776 else if (! (e->flags & EDGE_EH))
3777 e_taken = e;
3778
3779 bb_end_insn = BB_END (bb);
1476d1bd 3780 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
78bde837 3781 {
1476d1bd
MM
3782 ret_label = JUMP_LABEL (bb_end_jump);
3783 if (any_condjump_p (bb_end_jump))
78bde837
SB
3784 {
3785 /* This might happen if the conditional jump has side
3786 effects and could therefore not be optimized away.
3787 Make the basic block to end with a barrier in order
3788 to prevent rtl_verify_flow_info from complaining. */
3789 if (!e_fall)
3790 {
1476d1bd
MM
3791 gcc_assert (!onlyjump_p (bb_end_jump)
3792 || returnjump_p (bb_end_jump)
91af97c3 3793 || (e_taken->flags & EDGE_CROSSING));
1476d1bd 3794 emit_barrier_after (bb_end_jump);
78bde837
SB
3795 continue;
3796 }
3797
3798 /* If the old fallthru is still next, nothing to do. */
3799 if (bb->aux == e_fall->dest
fefa31b5 3800 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3801 continue;
3802
3803 /* The degenerated case of conditional jump jumping to the next
3804 instruction can happen for jumps with side effects. We need
3805 to construct a forwarder block and this will be done just
3806 fine by force_nonfallthru below. */
3807 if (!e_taken)
3808 ;
3809
3810 /* There is another special case: if *neither* block is next,
3811 such as happens at the very end of a function, then we'll
3812 need to add a new unconditional jump. Choose the taken
3813 edge based on known or assumed probability. */
3814 else if (bb->aux != e_taken->dest)
3815 {
1476d1bd 3816 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
78bde837
SB
3817
3818 if (note
5fa396ad
JH
3819 && profile_probability::from_reg_br_prob_note
3820 (XINT (note, 0)) < profile_probability::even ()
1476d1bd 3821 && invert_jump (bb_end_jump,
fefa31b5
DM
3822 (e_fall->dest
3823 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3824 ? NULL_RTX
3825 : label_for_bb (e_fall->dest)), 0))
3826 {
3827 e_fall->flags &= ~EDGE_FALLTHRU;
3828 gcc_checking_assert (could_fall_through
3829 (e_taken->src, e_taken->dest));
3830 e_taken->flags |= EDGE_FALLTHRU;
3831 update_br_prob_note (bb);
3832 e = e_fall, e_fall = e_taken, e_taken = e;
3833 }
3834 }
3835
3836 /* If the "jumping" edge is a crossing edge, and the fall
3837 through edge is non-crossing, leave things as they are. */
3838 else if ((e_taken->flags & EDGE_CROSSING)
3839 && !(e_fall->flags & EDGE_CROSSING))
3840 continue;
3841
3842 /* Otherwise we can try to invert the jump. This will
3843 basically never fail, however, keep up the pretense. */
1476d1bd 3844 else if (invert_jump (bb_end_jump,
fefa31b5
DM
3845 (e_fall->dest
3846 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3847 ? NULL_RTX
3848 : label_for_bb (e_fall->dest)), 0))
3849 {
3850 e_fall->flags &= ~EDGE_FALLTHRU;
3851 gcc_checking_assert (could_fall_through
3852 (e_taken->src, e_taken->dest));
3853 e_taken->flags |= EDGE_FALLTHRU;
3854 update_br_prob_note (bb);
3855 if (LABEL_NUSES (ret_label) == 0
3856 && single_pred_p (e_taken->dest))
a29f6a2b 3857 delete_insn (as_a<rtx_insn *> (ret_label));
78bde837
SB
3858 continue;
3859 }
3860 }
3861 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3862 {
3863 /* If the old fallthru is still next or if
3864 asm goto doesn't have a fallthru (e.g. when followed by
3865 __builtin_unreachable ()), nothing to do. */
3866 if (! e_fall
3867 || bb->aux == e_fall->dest
fefa31b5 3868 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3869 continue;
3870
3871 /* Otherwise we'll have to use the fallthru fixup below. */
3872 }
3873 else
3874 {
3875 /* Otherwise we have some return, switch or computed
3876 jump. In the 99% case, there should not have been a
3877 fallthru edge. */
3878 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3879 continue;
3880 }
3881 }
3882 else
3883 {
3884 /* No fallthru implies a noreturn function with EH edges, or
3885 something similarly bizarre. In any case, we don't need to
3886 do anything. */
3887 if (! e_fall)
3888 continue;
3889
3890 /* If the fallthru block is still next, nothing to do. */
3891 if (bb->aux == e_fall->dest)
3892 continue;
3893
3894 /* A fallthru to exit block. */
fefa31b5 3895 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3896 continue;
3897 }
3898
3899 /* We got here if we need to add a new jump insn.
3900 Note force_nonfallthru can delete E_FALL and thus we have to
3901 save E_FALL->src prior to the call to force_nonfallthru. */
78bde837
SB
3902 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3903 if (nb)
3904 {
3905 nb->aux = bb->aux;
3906 bb->aux = nb;
3907 /* Don't process this new block. */
3908 bb = nb;
78bde837
SB
3909 }
3910 }
3911
3912 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3913
29f3fd5b 3914 /* Annoying special case - jump around dead jumptables left in the code. */
11cd3bed 3915 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3916 {
3917 edge e = find_fallthru_edge (bb->succs);
3918
29f3fd5b
SB
3919 if (e && !can_fallthru (e->src, e->dest))
3920 force_nonfallthru (e);
78bde837
SB
3921 }
3922
7792f13c
EB
3923 /* Ensure goto_locus from edges has some instructions with that locus in RTL
3924 when not optimizing. */
3925 if (!optimize && !DECL_IGNORED_P (current_function_decl))
11cd3bed 3926 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3927 {
3928 edge e;
3929 edge_iterator ei;
3930
3931 FOR_EACH_EDGE (e, ei, bb->succs)
2f13f2de 3932 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
5368224f 3933 && !(e->flags & EDGE_ABNORMAL))
78bde837
SB
3934 {
3935 edge e2;
3936 edge_iterator ei2;
3937 basic_block dest, nb;
8879d71c 3938 rtx_insn *end;
78bde837
SB
3939
3940 insn = BB_END (e->src);
3941 end = PREV_INSN (BB_HEAD (e->src));
3942 while (insn != end
5368224f 3943 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
78bde837
SB
3944 insn = PREV_INSN (insn);
3945 if (insn != end
5368224f 3946 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3947 continue;
3948 if (simplejump_p (BB_END (e->src))
5368224f 3949 && !INSN_HAS_LOCATION (BB_END (e->src)))
78bde837 3950 {
5368224f 3951 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
78bde837
SB
3952 continue;
3953 }
3954 dest = e->dest;
fefa31b5 3955 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3956 {
3957 /* Non-fallthru edges to the exit block cannot be split. */
3958 if (!(e->flags & EDGE_FALLTHRU))
3959 continue;
3960 }
3961 else
3962 {
3963 insn = BB_HEAD (dest);
3964 end = NEXT_INSN (BB_END (dest));
3965 while (insn != end && !NONDEBUG_INSN_P (insn))
3966 insn = NEXT_INSN (insn);
5368224f
DC
3967 if (insn != end && INSN_HAS_LOCATION (insn)
3968 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3969 continue;
3970 }
3971 nb = split_edge (e);
3972 if (!INSN_P (BB_END (nb)))
1130d5e3 3973 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
190bea87 3974 nb);
5368224f 3975 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
78bde837
SB
3976
3977 /* If there are other incoming edges to the destination block
3978 with the same goto locus, redirect them to the new block as
3979 well, this can prevent other such blocks from being created
3980 in subsequent iterations of the loop. */
3981 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
2f13f2de 3982 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
78bde837 3983 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
5368224f 3984 && e->goto_locus == e2->goto_locus)
78bde837
SB
3985 redirect_edge_and_branch (e2, nb);
3986 else
3987 ei_next (&ei2);
3988 }
3989 }
3990}
3991\f
3992/* Perform sanity checks on the insn chain.
3993 1. Check that next/prev pointers are consistent in both the forward and
3994 reverse direction.
3995 2. Count insns in chain, going both directions, and check if equal.
3996 3. Check that get_last_insn () returns the actual end of chain. */
3997
3998DEBUG_FUNCTION void
3999verify_insn_chain (void)
4000{
8879d71c 4001 rtx_insn *x, *prevx, *nextx;
78bde837
SB
4002 int insn_cnt1, insn_cnt2;
4003
4004 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4005 x != 0;
4006 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4007 gcc_assert (PREV_INSN (x) == prevx);
4008
4009 gcc_assert (prevx == get_last_insn ());
4010
4011 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4012 x != 0;
4013 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4014 gcc_assert (NEXT_INSN (x) == nextx);
4015
4016 gcc_assert (insn_cnt1 == insn_cnt2);
4017}
4018\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* BB is the block (if any) that falls through into the exit block.  */
  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* bb->aux != NULL means BB is not already last in the layout chain
     (the aux pointers encode the intended block order).  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  /* Keep the labels (and the block's footer) with the first
	     half; the fallthru-to-exit part becomes the new BB.  */
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Find BB's predecessor in the aux chain and unlink BB.  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* Advance to the current last block of the chain.  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* Re-append BB as the final block, immediately before exit.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4063
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Scan exit's predecessors for fallthru edges; remember the first and
     note whether a second one exists.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  /* Zero or one fallthru predecessor: nothing to fix.  */
  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  /* Use ei_safe_edge because redirect_edge_and_branch_force removes the
     edge from the vector we are iterating over.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  The block whose aux is still NULL is the current last
     block in the layout order.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
4114\f
4115/* Return true in case it is possible to duplicate the basic block BB. */
4116
4117static bool
4118cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4119{
4120 /* Do not attempt to duplicate tablejumps, as we need to unshare
4121 the dispatch table. This is difficult to do, as the instructions
4122 computing jump destination may be hoisted outside the basic block. */
4123 if (tablejump_p (BB_END (bb), NULL, NULL))
4124 return false;
4125
4126 /* Do not duplicate blocks containing insns that can't be copied. */
4127 if (targetm.cannot_copy_insn_p)
4128 {
8879d71c 4129 rtx_insn *insn = BB_HEAD (bb);
78bde837
SB
4130 while (1)
4131 {
4132 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4133 return false;
4134 if (insn == BB_END (bb))
4135 break;
4136 insn = NEXT_INSN (insn);
4137 }
4138 }
4139
4140 return true;
4141}
4142
/* Duplicate the insns between FROM and TO (inclusive), emitting the copies
   at the end of the insn chain, and return the first insn of the copy.
   The copied chain is expected to be unlinked and reordered by the caller
   (see cfg_layout_duplicate_bb).  */

rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (DEBUG_BIND_INSN_P (insn)
	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Preserve (simple) return jump labels, which are not real
	     labels and would not otherwise be copied.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  /* Labels are not copied; jumps into the copy are rewired by
	     the caller's CFG machinery.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	      /* These notes carry meaning for later passes and must be
		 duplicated along with the insns.  */
	      emit_note_copy (as_a <rtx_note *> (insn));
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The first copied insn follows the placeholder note; drop the note
     and return the start of the copy.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
4233
/* Create a duplicate of the basic block BB.  Returns the new block, placed
   just before the exit block in the block chain.  The new block copies BB's
   partition and, in cfglayout mode, its header and footer insn chains.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx_insn *insn;
  basic_block new_bb;

  /* Copy BB's body to the end of the insn chain and wrap it in a new
     basic block.  INSN may be NULL for an empty block.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Find the end of the header chain, duplicate it, and unlink the
	 copy from the main insn stream into the new block's header.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Same treatment for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4270
4271\f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups.  As an
     example, if edge forwarding performed when optimizing the cfg
     layout required moving a block from the hot to the cold
     section.  This would create an illegal partitioning unless some
     manual fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);

  /* Needed so duplicated blocks can be mapped back to their originals.  */
  initialize_original_copy_tables ();

  /* Switch the cfg hooks to the cfglayout-aware variants.  */
  cfg_layout_rtl_register_cfg_hooks ();

  /* Record per-block header/footer boundaries before reordering.  */
  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4306
4307/* Splits superblocks. */
4308void
4309break_superblocks (void)
4310{
78bde837
SB
4311 bool need = false;
4312 basic_block bb;
4313
7ba9e72d 4314 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
f61e445a 4315 bitmap_clear (superblocks);
78bde837 4316
11cd3bed 4317 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4318 if (bb->flags & BB_SUPERBLOCK)
4319 {
4320 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4321 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4322 need = true;
4323 }
4324
4325 if (need)
4326 {
4327 rebuild_jump_labels (get_insns ());
4328 find_many_sub_basic_blocks (superblocks);
4329 }
78bde837
SB
4330}
4331
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  /* Dominators are stale after the reordering below.  */
  free_dominance_info (CDI_DOMINATORS);
  force_one_exit_fallthru ();
  /* Switch back to the plain RTL cfg hooks before materializing the
     layout into the insn stream.  */
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  /* Reorder insns and insert compensation jumps per the aux chain.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
4352
ba5e9aca 4353
9ee634e3 4354/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4355
4356static basic_block
d329e058 4357cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4358{
ae50c0cb 4359 rtx insn = (rtx) insnp;
f470c378 4360 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4361
d8ce2eae
DM
4362 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4363 BB_FOOTER (bb) = NULL;
9ee634e3 4364
f470c378 4365 return new_bb;
9ee634e3
JH
4366}
4367
9ee634e3 4368/* Redirect Edge to DEST. */
6de9cd9a 4369static edge
d329e058 4370cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
9ee634e3
JH
4371{
4372 basic_block src = e->src;
6de9cd9a 4373 edge ret;
9ee634e3 4374
bc35512f 4375 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 4376 return NULL;
bc35512f 4377
3348b696 4378 if (e->dest == dest)
6de9cd9a 4379 return e;
bc35512f 4380
687d5dfe
JH
4381 if (e->flags & EDGE_CROSSING
4382 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4383 && simplejump_p (BB_END (src)))
4384 {
4385 if (dump_file)
4386 fprintf (dump_file,
4387 "Removing crossing jump while redirecting edge form %i to %i\n",
4388 e->src->index, dest->index);
4389 delete_insn (BB_END (src));
4390 e->flags |= EDGE_FALLTHRU;
4391 }
4392
fefa31b5 4393 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
6de9cd9a 4394 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
f345f21a 4395 {
6fb5fa3c 4396 df_set_bb_dirty (src);
6de9cd9a 4397 return ret;
f345f21a 4398 }
bc35512f 4399
fefa31b5 4400 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
4401 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4402 {
c263766c
RH
4403 if (dump_file)
4404 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
bc35512f
JH
4405 e->src->index, dest->index);
4406
6fb5fa3c 4407 df_set_bb_dirty (e->src);
bc35512f 4408 redirect_edge_succ (e, dest);
6de9cd9a 4409 return e;
bc35512f
JH
4410 }
4411
9ee634e3
JH
4412 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4413 in the case the basic block appears to be in sequence. Avoid this
4414 transformation. */
4415
9ee634e3
JH
4416 if (e->flags & EDGE_FALLTHRU)
4417 {
4418 /* Redirect any branch edges unified with the fallthru one. */
4b4bf941 4419 if (JUMP_P (BB_END (src))
432f982f
JH
4420 && label_is_jump_target_p (BB_HEAD (e->dest),
4421 BB_END (src)))
9ee634e3 4422 {
341c100f 4423 edge redirected;
c22cacf3 4424
c263766c
RH
4425 if (dump_file)
4426 fprintf (dump_file, "Fallthru edge unified with branch "
432f982f
JH
4427 "%i->%i redirected to %i\n",
4428 e->src->index, e->dest->index, dest->index);
4429 e->flags &= ~EDGE_FALLTHRU;
341c100f
NS
4430 redirected = redirect_branch_edge (e, dest);
4431 gcc_assert (redirected);
0c617be4
JL
4432 redirected->flags |= EDGE_FALLTHRU;
4433 df_set_bb_dirty (redirected->src);
4434 return redirected;
9ee634e3
JH
4435 }
4436 /* In case we are redirecting fallthru edge to the branch edge
c22cacf3 4437 of conditional jump, remove it. */
628f6a4e 4438 if (EDGE_COUNT (src->succs) == 2)
9ee634e3 4439 {
03101c6f
KH
4440 /* Find the edge that is different from E. */
4441 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
628f6a4e 4442
9ee634e3 4443 if (s->dest == dest
a813c111
SB
4444 && any_condjump_p (BB_END (src))
4445 && onlyjump_p (BB_END (src)))
4446 delete_insn (BB_END (src));
9ee634e3 4447 }
c263766c 4448 if (dump_file)
dc764d10 4449 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
bc35512f 4450 e->src->index, e->dest->index, dest->index);
0c617be4 4451 ret = redirect_edge_succ_nodup (e, dest);
9ee634e3
JH
4452 }
4453 else
bc35512f 4454 ret = redirect_branch_edge (e, dest);
9ee634e3 4455
687d5dfe 4456 fixup_partition_crossing (ret);
9ee634e3 4457 /* We don't want simplejumps in the insn stream during cfglayout. */
687d5dfe 4458 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
9ee634e3 4459
6fb5fa3c 4460 df_set_bb_dirty (src);
9ee634e3
JH
4461 return ret;
4462}
4463
4464/* Simple wrapper as we always can redirect fallthru edges. */
4465static basic_block
d329e058 4466cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4467{
341c100f
NS
4468 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4469
4470 gcc_assert (redirected);
9ee634e3
JH
4471 return NULL;
4472}
4473
f470c378
ZD
/* Same as delete_basic_block but update cfg_layout structures.
   The block's header insns are spliced back into the main insn stream
   before BB_HEAD, its footer (minus barriers) after BB_END, and whatever
   survives the deletion is re-attached to the next block's header (or to
   the function footer when BB was last).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  if (BB_HEADER (bb))
    {
      /* Link the header chain into the stream just before BB_HEAD.  */
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* First unlink barriers from the footer; stop at the first label
	 (anything from a label on belongs to the next block).  */
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (BB_FOOTER (bb))
	{
	  /* Splice the remaining footer into the stream after BB_END.  */
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Surviving insns become the next block's header, or the function
     footer when BB was the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* PREV/NEXT bracketed the deleted range; step inward to find what,
     if anything, remains between them.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      /* Re-attach the leftover insns in front of *TO.  */
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4557
beb235f8 4558/* Return true when blocks A and B can be safely merged. */
b48d0358 4559
bc35512f 4560static bool
b48d0358 4561cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4562{
750054a2
CT
4563 /* If we are partitioning hot/cold basic blocks, we don't want to
4564 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4565 and cold sections.
4566
8e8d5162 4567 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4568 be optimizable (or blocks that appear to be mergeable), but which really
4569 must be left untouched (they are required to make it safely across
4570 partition boundaries). See the comments at the top of
8e8d5162
CT
4571 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4572
87c8b4be 4573 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4574 return false;
750054a2 4575
7d776ee2
RG
4576 /* Protect the loop latches. */
4577 if (current_loops && b->loop_father->latch == b)
4578 return false;
4579
894a84b5
BS
4580 /* If we would end up moving B's instructions, make sure it doesn't fall
4581 through into the exit block, since we cannot recover from a fallthrough
4582 edge into the exit block occurring in the middle of a function. */
4583 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4584 {
4585 edge e = find_fallthru_edge (b->succs);
fefa31b5 4586 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4587 return false;
4588 }
4589
bc35512f 4590 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4591 return (single_succ_p (a)
4592 && single_succ (a) == b
4593 && single_pred_p (b) == 1
628f6a4e 4594 && a != b
bc35512f 4595 /* Must be simple edge. */
c5cbcccf 4596 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4597 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4598 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4599 /* If the jump insn has side effects, we can't kill the edge.
4600 When not optimizing, try_redirect_by_replacing_jump will
4601 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4602 && (!JUMP_P (BB_END (a))
49ea3702 4603 || ((!optimize || reload_completed)
a813c111 4604 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4605}
4606
41806d92
NS
/* Merge block A and B.  The blocks must be mergeable
   (see cfg_layout_can_merge_blocks_p).  B's insns, header and footer are
   absorbed into A and B is deleted from the df machinery; the caller
   removes B from the CFG proper.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  /* Append B's footer chain to the end of A's.  */
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  /* Prepend B's header chain in front of A's footer.  */
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  /* Delete B's now-redundant basic-block note.  */
  delete_insn (insn);

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4714
4715/* Split edge E. */
f470c378 4716
bc35512f
JH
4717static basic_block
4718cfg_layout_split_edge (edge e)
4719{
bc35512f 4720 basic_block new_bb =
fefa31b5 4721 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4722 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4723 NULL_RTX, e->src);
4724
fefa31b5 4725 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4726 BB_COPY_PARTITION (new_bb, e->src);
4727 else
4728 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4729 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4730 redirect_edge_and_branch_force (e, new_bb);
4731
4732 return new_bb;
4733}
4734
f470c378
ZD
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing is needed at the RTL level; this hook is intentionally empty.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4741
df92c640
SB
4742/* Return true if BB contains only labels or non-executable
4743 instructions. */
4744
4745static bool
4746rtl_block_empty_p (basic_block bb)
4747{
8879d71c 4748 rtx_insn *insn;
df92c640 4749
fefa31b5
DM
4750 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4751 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4752 return true;
4753
4754 FOR_BB_INSNS (bb, insn)
4755 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4756 return false;
4757
4758 return true;
4759}
4760
4761/* Split a basic block if it ends with a conditional branch and if
4762 the other part of the block is not empty. */
4763
4764static basic_block
4765rtl_split_block_before_cond_jump (basic_block bb)
4766{
8879d71c
DM
4767 rtx_insn *insn;
4768 rtx_insn *split_point = NULL;
4769 rtx_insn *last = NULL;
df92c640
SB
4770 bool found_code = false;
4771
4772 FOR_BB_INSNS (bb, insn)
4773 {
4774 if (any_condjump_p (insn))
4775 split_point = last;
4776 else if (NONDEBUG_INSN_P (insn))
4777 found_code = true;
4778 last = insn;
4779 }
4780
4781 /* Did not find everything. */
4782 if (found_code && split_point)
4783 return split_block (bb, split_point)->dest;
4784 else
4785 return NULL;
4786}
4787
6de9cd9a
DN
4788/* Return 1 if BB ends with a call, possibly followed by some
4789 instructions that must stay with the call, 0 otherwise. */
4790
4791static bool
b48d0358 4792rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a 4793{
8879d71c 4794 rtx_insn *insn = BB_END (bb);
6de9cd9a 4795
4b4bf941 4796 while (!CALL_P (insn)
6de9cd9a 4797 && insn != BB_HEAD (bb)
92ddef69 4798 && (keep_with_call_p (insn)
b5b8b0ac
AO
4799 || NOTE_P (insn)
4800 || DEBUG_INSN_P (insn)))
6de9cd9a 4801 insn = PREV_INSN (insn);
4b4bf941 4802 return (CALL_P (insn));
6de9cd9a
DN
4803}
4804
4805/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4806
4807static bool
9678086d 4808rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4809{
4810 return any_condjump_p (BB_END (bb));
4811}
4812
4813/* Return true if we need to add fake edge to exit.
4814 Helper function for rtl_flow_call_edges_add. */
4815
4816static bool
8879d71c 4817need_fake_edge_p (const rtx_insn *insn)
6de9cd9a
DN
4818{
4819 if (!INSN_P (insn))
4820 return false;
4821
4b4bf941 4822 if ((CALL_P (insn)
6de9cd9a
DN
4823 && !SIBLING_CALL_P (insn)
4824 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4825 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4826 return true;
4827
4828 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4829 && MEM_VOLATILE_P (PATTERN (insn)))
4830 || (GET_CODE (PATTERN (insn)) == PARALLEL
4831 && asm_noperands (insn) != -1
4832 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4833 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4834}
4835
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do when the function has no real blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The dummy use forces a block split below, keeping the
		 fallthru edge distinct from the fake edge.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk insns from the end of the block backwards; PREV_INSN is
	 captured before any split so the iteration stays valid.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4961
1cb7dfc3 4962/* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
315682fb 4963 the conditional branch target, SECOND_HEAD should be the fall-thru
1cb7dfc3
MH
4964 there is no need to handle this here the loop versioning code handles
4965 this. the reason for SECON_HEAD is that it is needed for condition
4966 in trees, and this should be of the same type since it is a hook. */
4967static void
4968rtl_lv_add_condition_to_bb (basic_block first_head ,
c22cacf3
MS
4969 basic_block second_head ATTRIBUTE_UNUSED,
4970 basic_block cond_bb, void *comp_rtx)
1cb7dfc3 4971{
1476d1bd 4972 rtx_code_label *label;
8879d71c 4973 rtx_insn *seq, *jump;
1cb7dfc3
MH
4974 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4975 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4976 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
ef4bddc2 4977 machine_mode mode;
1cb7dfc3
MH
4978
4979
4980 label = block_label (first_head);
4981 mode = GET_MODE (op0);
4982 if (mode == VOIDmode)
4983 mode = GET_MODE (op1);
4984
4985 start_sequence ();
4986 op0 = force_operand (op0, NULL_RTX);
4987 op1 = force_operand (op1, NULL_RTX);
357067f2
JH
4988 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
4989 profile_probability::uninitialized ());
1cb7dfc3
MH
4990 jump = get_last_insn ();
4991 JUMP_LABEL (jump) = label;
4992 LABEL_NUSES (label)++;
4993 seq = get_insns ();
4994 end_sequence ();
4995
2b9c63a2 4996 /* Add the new cond, in the new head. */
c3284718 4997 emit_insn_after (seq, BB_END (cond_bb));
1cb7dfc3
MH
4998}
4999
5000
5001/* Given a block B with unconditional branch at its end, get the
5002 store the return the branch edge and the fall-thru edge in
5003 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
5004static void
5005rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5006 edge *fallthru_edge)
5007{
5008 edge e = EDGE_SUCC (b, 0);
5009
5010 if (e->flags & EDGE_FALLTHRU)
5011 {
5012 *fallthru_edge = e;
5013 *branch_edge = EDGE_SUCC (b, 1);
5014 }
5015 else
5016 {
5017 *branch_edge = e;
5018 *fallthru_edge = EDGE_SUCC (b, 1);
5019 }
5020}
5021
5e2d947c
JH
/* Allocate and attach the RTL-specific info structure for basic block BB.
   Must be called exactly once per block.  */
void
init_rtl_bb_info (basic_block bb)
{
  /* Catch double initialization.  */
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  /* GC-allocated and zeroed, so every field of the info starts as NULL.  */
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}
5029
14fa2cc0
ZD
5030/* Returns true if it is possible to remove edge E by redirecting
5031 it to the destination of the other edge from E->src. */
5032
5033static bool
9678086d 5034rtl_can_remove_branch_p (const_edge e)
14fa2cc0 5035{
9678086d
KG
5036 const_basic_block src = e->src;
5037 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
8879d71c
DM
5038 const rtx_insn *insn = BB_END (src);
5039 rtx set;
14fa2cc0
ZD
5040
5041 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 5042 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
5043 return false;
5044
5045 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5046 return false;
5047
3371a64f 5048 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
5049 return false;
5050
5051 if (!onlyjump_p (insn)
5052 || tablejump_p (insn, NULL, NULL))
5053 return false;
5054
5055 set = single_set (insn);
5056 if (!set || side_effects_p (set))
5057 return false;
5058
5059 return true;
5060}
5061
ffe14686
AM
5062static basic_block
5063rtl_duplicate_bb (basic_block bb)
5064{
5065 bb = cfg_layout_duplicate_bb (bb);
5066 bb->aux = NULL;
5067 return bb;
5068}
5069
/* Do book-keeping of basic block BB for the profile consistency checker.
   Store the counting in RECORD.  */
static void
rtl_account_profile_record (basic_block bb, struct profile_record *record)
{
  rtx_insn *insn;
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	/* Size: accumulate the size-oriented cost of every real insn.  */
	record->size += insn_cost (insn, false);
	/* Time: weight each insn's speed cost by the block's execution
	   count when the profile is known, otherwise by the guessed
	   frequency (and not at all when neither is available).  */
	if (bb->count.initialized_p ())
	  record->time
	    += insn_cost (insn, true) * bb->count.to_gcov_type ();
	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	  record->time
	    += insn_cost (insn, true) * bb->count.to_frequency (cfun);
      }
}
5088
9ee634e3
JH
/* Implementation of CFG manipulation for linearized RTL.
   NOTE: this is a positional aggregate initializer; entries must stay in
   the exact order of the fields of struct cfg_hooks (cfghooks.h).  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,	/* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
5126
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   NOTE: positional aggregate initializer; entries must stay in the exact
   order of the fields of struct cfg_hooks (cfghooks.h).  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
78bde837
SB
5168
5169#include "gt-cfgrtl.h"