]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
remove cast to rtx_insn * in remove_note
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
ca6c03ca 1/* Control flow graph manipulation code for GNU compiler.
818ab71a 2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
ca6c03ca
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca6c03ca
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca6c03ca 19
5f0d2358
RK
20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
ca6c03ca
JH
22
23 Available functionality:
bc35512f 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5f0d2358 25 - CFG-aware instruction chain manipulation
ca6c03ca 26 delete_insn, delete_insn_chain
bc35512f
JH
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
ba5e9aca
EB
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
bc35512f
JH
33
34 Functions not supposed for generic use:
5f0d2358 35 - Infrastructure to determine quickly basic block for insn
ca6c03ca 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5f0d2358 37 - Edge redirection with updating and optimizing of insn chain
bc35512f 38 block_label, tidy_fallthru_edge, force_nonfallthru */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6 42#include "coretypes.h"
c7131fb2 43#include "backend.h"
957060b5 44#include "target.h"
c7131fb2 45#include "rtl.h"
957060b5
AM
46#include "tree.h"
47#include "cfghooks.h"
c7131fb2 48#include "df.h"
957060b5 49#include "insn-config.h"
4d0cdd0c 50#include "memmodel.h"
957060b5 51#include "emit-rtl.h"
60393bbc
AM
52#include "cfgrtl.h"
53#include "cfganal.h"
54#include "cfgbuild.h"
55#include "cfgcleanup.h"
60393bbc 56#include "bb-reorder.h"
0cbd9993 57#include "rtl-error.h"
2a34bece 58#include "insn-attr.h"
36566b39 59#include "dojump.h"
ff25ef99 60#include "expr.h"
1cb7dfc3 61#include "cfgloop.h"
ef330312 62#include "tree-pass.h"
013a8899 63#include "print-rtl.h"
ca6c03ca 64
78bde837
SB
65/* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
1130d5e3 67static GTY(()) rtx_insn *cfg_layout_function_footer;
9152e0aa 68static GTY(()) rtx_insn *cfg_layout_function_header;
78bde837 69
8879d71c 70static rtx_insn *skip_insns_after_block (basic_block);
78bde837 71static void record_effective_endpoints (void);
78bde837
SB
72static void fixup_reorder_chain (void);
73
74void verify_insn_chain (void);
75static void fixup_fallthru_exit_predecessor (void);
8879d71c
DM
76static int can_delete_note_p (const rtx_note *);
77static int can_delete_label_p (const rtx_code_label *);
d329e058 78static basic_block rtl_split_edge (edge);
f470c378 79static bool rtl_move_block_after (basic_block, basic_block);
d329e058 80static int rtl_verify_flow_info (void);
f470c378 81static basic_block cfg_layout_split_block (basic_block, void *);
6de9cd9a 82static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
d329e058
AJ
83static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84static void cfg_layout_delete_block (basic_block);
85static void rtl_delete_block (basic_block);
86static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
6de9cd9a 87static edge rtl_redirect_edge_and_branch (edge, basic_block);
f470c378 88static basic_block rtl_split_block (basic_block, void *);
a315c44c 89static void rtl_dump_bb (FILE *, basic_block, int, int);
d329e058 90static int rtl_verify_flow_info_1 (void);
f470c378 91static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
92\f
93/* Return true if NOTE is not one of the ones that must be kept paired,
5f0d2358 94 so that we may simply delete it. */
ca6c03ca
JH
95
96static int
8879d71c 97can_delete_note_p (const rtx_note *note)
ca6c03ca 98{
cd9c1ca8
RH
99 switch (NOTE_KIND (note))
100 {
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
105
106 default:
107 return false;
108 }
ca6c03ca
JH
109}
110
111/* True if a given label can be deleted. */
112
113static int
8879d71c 114can_delete_label_p (const rtx_code_label *label)
ca6c03ca 115{
5f0d2358
RK
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
6f7eba34
TS
119 && !vec_safe_contains<rtx_insn *> (forced_labels,
120 const_cast<rtx_code_label *> (label)));
ca6c03ca
JH
121}
122
03fbe718 123/* Delete INSN by patching it out. */
ca6c03ca 124
/* Delete INSN by patching it out.

   Labels that cannot really be removed are instead converted in place
   into NOTE_INSN_DELETED_LABEL notes; everything else is unchained via
   remove_insn and marked deleted.  Use counts of any labels referenced
   by INSN (jump targets, REG_LABEL_* notes, jump-table entries) are
   decremented.  */

void
delete_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  /* Keep the insn but demote it to a deleted-label note.  */
	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.
	     This keeps the basic-block note first in the block.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
219
98ccd1d7
IE
220/* Like delete_insn but also purge dead edges from BB.
221 Return true if any edges are eliminated. */
9ed7b221 222
98ccd1d7 223bool
373fc786 224delete_insn_and_edges (rtx_insn *insn)
3dec4024 225{
3dec4024
JH
226 bool purge = false;
227
ba4f7968 228 if (INSN_P (insn)
3dec4024 229 && BLOCK_FOR_INSN (insn)
a813c111 230 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024 231 purge = true;
03fbe718 232 delete_insn (insn);
3dec4024 233 if (purge)
98ccd1d7
IE
234 return purge_dead_edges (BLOCK_FOR_INSN (insn));
235 return false;
3dec4024
JH
236}
237
ca6c03ca 238/* Unlink a chain of insns between START and FINISH, leaving notes
a7b87f73
ZD
239 that must be paired. If CLEAR_BB is true, we set bb field for
240 insns that cannot be removed to NULL. */
ca6c03ca
JH
241
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx_insn *prev, *current;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  current = safe_as_a <rtx_insn *> (finish);
  while (1)
    {
      /* Walk backward from FINISH to START; fetch PREV before deleting
	 so deletion cannot disturb the traversal.  */
      prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;  /* Paired note: must survive the deletion.  */
      else
	delete_insn (current);

      /* Insns kept (paired notes, converted deleted labels) may still
	 need their block pointer cleared.  */
      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
267\f
5f0d2358
RK
268/* Create a new basic block consisting of the instructions between HEAD and END
269 inclusive. This function is designed to allow fast BB construction - reuses
270 the note and basic block struct in BB_NOTE, if any and do not grow
271 BASIC_BLOCK chain and should be used directly only by CFG construction code.
272 END can be NULL in to create new empty basic block before HEAD. Both END
918ed612
ZD
273 and HEAD can be NULL to create basic block at the end of INSN chain.
274 AFTER is the basic block we should be put after. */
ca6c03ca
JH
275
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      /* The bb note must follow the label (if any); otherwise it
	 becomes the new head itself.  */
      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* Block starts with a label: place the note right after it.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
348
5f0d2358 349/* Create new basic block consisting of instructions in between HEAD and END
ffe14686
AM
350 and place it to the BB chain after block AFTER. END can be NULL to
351 create a new empty basic block before HEAD. Both END and HEAD can be
352 NULL to create basic block at the end of INSN chain. */
ca6c03ca 353
bc35512f
JH
354static basic_block
355rtl_create_basic_block (void *headp, void *endp, basic_block after)
ca6c03ca 356{
e00022e9
DM
357 rtx_insn *head = (rtx_insn *) headp;
358 rtx_insn *end = (rtx_insn *) endp;
ca6c03ca 359 basic_block bb;
0b17ab2f 360
7eca0767 361 /* Grow the basic block array if needed. */
8b1c6fd7
DM
362 if ((size_t) last_basic_block_for_fn (cfun)
363 >= basic_block_info_for_fn (cfun)->length ())
7eca0767 364 {
8b1c6fd7
DM
365 size_t new_size =
366 (last_basic_block_for_fn (cfun)
367 + (last_basic_block_for_fn (cfun) + 3) / 4);
c5f05f68 368 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
7eca0767 369 }
0b17ab2f 370
0cae8d31 371 n_basic_blocks_for_fn (cfun)++;
ca6c03ca 372
852c6ec7 373 bb = create_basic_block_structure (head, end, NULL, after);
ca6c03ca
JH
374 bb->aux = NULL;
375 return bb;
376}
bc35512f
JH
377
378static basic_block
379cfg_layout_create_basic_block (void *head, void *end, basic_block after)
380{
381 basic_block newbb = rtl_create_basic_block (head, end, after);
382
bc35512f
JH
383 return newbb;
384}
ca6c03ca
JH
385\f
386/* Delete the insns in a (non-live) block. We physically delete every
387 non-deleted-note insn, and update the flow graph appropriately.
388
389 Return nonzero if we deleted an exception handler. */
390
391/* ??? Preserving all such notes strikes me as wrong. It would be nice
392 to post-process the stream to remove empty blocks, loops, ranges, etc. */
393
/* Delete the insns in a (non-live) block B and remove B from the
   dataflow and basic-block bookkeeping.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  /* End of the deletion range includes any trailing barrier/table.  */
  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
415\f
852c6ec7 416/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
417
418void
d329e058 419compute_bb_for_insn (void)
ca6c03ca 420{
e0082a72 421 basic_block bb;
ca6c03ca 422
11cd3bed 423 FOR_EACH_BB_FN (bb, cfun)
ca6c03ca 424 {
8879d71c
DM
425 rtx_insn *end = BB_END (bb);
426 rtx_insn *insn;
ca6c03ca 427
a813c111 428 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 429 {
ba4f7968 430 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
431 if (insn == end)
432 break;
ca6c03ca
JH
433 }
434 }
435}
436
437/* Release the basic_block_for_insn array. */
438
c2924966 439unsigned int
d329e058 440free_bb_for_insn (void)
ca6c03ca 441{
8879d71c 442 rtx_insn *insn;
ba4f7968 443 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 444 if (!BARRIER_P (insn))
ba4f7968 445 BLOCK_FOR_INSN (insn) = NULL;
c2924966 446 return 0;
ca6c03ca
JH
447}
448
27a4cd48
DM
namespace {

/* Pass metadata for the *free_cfg pass: an RTL pass that destroys the
   PROP_cfg property by releasing the BLOCK_FOR_INSN mapping.  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping the free_bb_for_insn teardown.  */
class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  /* Record hot/cold section boundaries before the CFG goes away.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
501
91278841 502/* Return RTX to emit after when we want to emit code on the entry of function. */
8a2e8325 503rtx_insn *
91278841
AP
504entry_of_function (void)
505{
0cae8d31 506 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
fefa31b5 507 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
91278841
AP
508}
509
11b904a1
BS
510/* Emit INSN at the entry point of the function, ensuring that it is only
511 executed once per function. */
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  /* The entry block is expected to have a single fallthru successor;
     inserting on that edge places INSN on the unique entry path.  */
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
522
6c3d0e31 523/* Update BLOCK_FOR_INSN of insns between BEGIN and END
b8698a0f 524 (or BARRIER if found) and notify df of the bb change.
6c3d0e31
SP
525 The insn chain range is inclusive
526 (i.e. both BEGIN and END will be updated. */
ca6c03ca 527
6c3d0e31 528static void
8879d71c 529update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
ca6c03ca 530{
8879d71c 531 rtx_insn *insn;
ca6c03ca 532
6c3d0e31
SP
533 end = NEXT_INSN (end);
534 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
63642d5a
AO
535 if (!BARRIER_P (insn))
536 df_insn_change_bb (insn, bb);
ca6c03ca 537}
6c3d0e31
SP
538
539/* Update BLOCK_FOR_INSN of insns in BB to BB,
540 and notify df of the change. */
541
542void
543update_bb_for_insn (basic_block bb)
544{
545 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
546}
547
532aafad
SB
548\f
549/* Like active_insn_p, except keep the return value clobber around
550 even after reload. */
551
552static bool
8879d71c 553flow_active_insn_p (const rtx_insn *insn)
532aafad
SB
554{
555 if (active_insn_p (insn))
556 return true;
557
558 /* A clobber of the function return value exists for buggy
559 programs that fail to return a value. Its effect is to
560 keep the return value from being live across the entire
561 function. If we allow it to be skipped, we introduce the
562 possibility for register lifetime confusion. */
563 if (GET_CODE (PATTERN (insn)) == CLOBBER
564 && REG_P (XEXP (PATTERN (insn), 0))
565 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
566 return true;
567
568 return false;
569}
570
571/* Return true if the block has no effect and only forwards control flow to
572 its single destination. */
532aafad
SB
573
bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  /* Entry/exit blocks, multi-successor blocks, and fake edges never
     qualify as pure forwarders.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  /* All insns before the final one must be inactive.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* On loop exit INSN == BB_END (bb): the last insn may only be a
     non-insn, a simple jump, or otherwise inactive.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
593
594/* Likewise, but protect loop latches, headers and preheaders. */
595/* FIXME: Make this a cfg hook. */
596
597bool
598forwarder_block_p (const_basic_block bb)
599{
600 if (!contains_no_active_insn_p (bb))
601 return false;
602
532aafad
SB
603 /* Protect loop latches, headers and preheaders. */
604 if (current_loops)
605 {
606 basic_block dest;
607 if (bb->loop_father->header == bb)
608 return false;
609 dest = EDGE_SUCC (bb, 0)->dest;
610 if (dest->loop_father->header == dest)
611 return false;
612 }
613
2407343c 614 return true;
532aafad
SB
615}
616
617/* Return nonzero if we can reach target from src by falling through. */
d7b6661b 618/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
532aafad
SB
619
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  /* The exit block can always be "fallen into".  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fallthru requires physical adjacency in the block chain.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fallthru edge to the exit block rules it out.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  /* Finally, the next active insn after SRC's end must be the first
     active insn of TARGET.  */
  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}
648
649/* Return nonzero if we could reach target from src by falling through,
650 if the target was made adjacent. If we already have a fall-through
651 edge to the exit block, we can't do that. */
652static bool
653could_fall_through (basic_block src, basic_block target)
654{
655 edge e;
656 edge_iterator ei;
657
fefa31b5 658 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
659 return true;
660 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 661 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad
SB
662 && e->flags & EDGE_FALLTHRU)
663 return 0;
664 return true;
665}
ca6c03ca 666\f
ef2be249 667/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
ed270997 668rtx_note *
ef2be249
RS
669bb_note (basic_block bb)
670{
8879d71c 671 rtx_insn *note;
ef2be249
RS
672
673 note = BB_HEAD (bb);
674 if (LABEL_P (note))
675 note = NEXT_INSN (note);
676
677 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
ed270997 678 return as_a <rtx_note *> (note);
ef2be249
RS
679}
680
6fb5fa3c
DB
681/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
682 note associated with the BLOCK. */
683
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  /* Skip a leading label; the bb note must come next.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
700
2b9c63a2
FY
701/* Creates a new basic block just after basic block BB by splitting
702 everything after specified instruction INSNP. */
ca6c03ca 703
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  /* A NULL split point means: split right after the bb note (i.e. keep
     only the label/note in BB).  */
  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
762
9be94227
EB
763/* Return true if the single edge between blocks A and B is the only place
764 in RTL which holds some unique locus. */
765
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  /* No locus on the edge: nothing to preserve.  */
  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward: if the last located nondebug insn in A
     already carries the locus, the edge's copy is not unique.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward: likewise for B's first nondebug insn.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
799
800/* If the single edge between blocks A and B is the only place in RTL which
801 holds some unique locus, emit a nop with that locus between the blocks. */
802
803static void
804emit_nop_for_unique_locus_between (basic_block a, basic_block b)
805{
806 if (!unique_locus_on_edge_between_p (a, b))
807 return;
808
1130d5e3 809 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5368224f 810 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
9be94227
EB
811}
812
ca6c03ca 813/* Blocks A and B are to be merged into a single block A. The insns
bc35512f 814 are already contiguous. */
ca6c03ca 815
bc35512f
JH
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  /* [b_debug_start, b_debug_end] tracks a trailing run of debug insns
     in B, handled separately from B's real insns.  */
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Step b_end back over trailing debug insns.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
bc35512f 932
6fb5fa3c 933
bc35512f 934/* Return true when block A and B can be merged. */
9678086d 935
b48d0358
DN
936static bool
937rtl_can_merge_blocks (basic_block a, basic_block b)
bc35512f 938{
750054a2
CT
939 /* If we are partitioning hot/cold basic blocks, we don't want to
940 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
941 and cold sections.
942
8e8d5162 943 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
944 be optimizable (or blocks that appear to be mergeable), but which really
945 must be left untouched (they are required to make it safely across
946 partition boundaries). See the comments at the top of
8e8d5162 947 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
076c7ab8 948
87c8b4be 949 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 950 return false;
750054a2 951
7d776ee2
RG
952 /* Protect the loop latches. */
953 if (current_loops && b->loop_father->latch == b)
954 return false;
955
bc35512f 956 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
957 return (single_succ_p (a)
958 && single_succ (a) == b
959 && single_pred_p (b)
628f6a4e 960 && a != b
bc35512f 961 /* Must be simple edge. */
c5cbcccf 962 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
bc35512f 963 && a->next_bb == b
fefa31b5
DM
964 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
965 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
966 /* If the jump insn has side effects,
967 we can't kill the edge. */
4b4bf941 968 && (!JUMP_P (BB_END (a))
e24e7211 969 || (reload_completed
a813c111 970 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f 971}
ca6c03ca 972\f
5f0d2358
RK
973/* Return the label in the head of basic block BLOCK. Create one if it doesn't
974 exist. */
ca6c03ca 975
1476d1bd 976rtx_code_label *
d329e058 977block_label (basic_block block)
ca6c03ca 978{
fefa31b5 979 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1476d1bd 980 return NULL;
5f0d2358 981
4b4bf941 982 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 983 {
1130d5e3 984 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 985 }
5f0d2358 986
1476d1bd 987 return as_a <rtx_code_label *> (BB_HEAD (block));
ca6c03ca
JH
988}
989
990/* Attempt to perform edge redirection by replacing possibly complex jump
5f0d2358
RK
991 instruction by unconditional jump or removing jump completely. This can
992 apply only if all edges now point to the same block. The parameters and
993 return values are equivalent to redirect_edge_and_branch. */
ca6c03ca 994
6de9cd9a 995edge
bc35512f 996try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
ca6c03ca
JH
997{
998 basic_block src = e->src;
8879d71c 999 rtx_insn *insn = BB_END (src), *kill_from;
e1233a7d 1000 rtx set;
ca6c03ca 1001 int fallthru = 0;
750054a2
CT
1002
1003 /* If we are partitioning hot/cold basic blocks, we don't want to
1004 mess up unconditional or indirect jumps that cross between hot
8e8d5162
CT
1005 and cold sections.
1006
1007 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
1008 be optimizable (or blocks that appear to be mergeable), but which really
1009 must be left untouched (they are required to make it safely across
1010 partition boundaries). See the comments at the top of
8e8d5162 1011 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
c22cacf3 1012
3371a64f 1013 if (BB_PARTITION (src) != BB_PARTITION (target))
9cf84a3c 1014 return NULL;
750054a2 1015
6a66a8a7
KH
1016 /* We can replace or remove a complex jump only when we have exactly
1017 two edges. Also, if we have exactly one outgoing edge, we can
1018 redirect that. */
1019 if (EDGE_COUNT (src->succs) >= 3
1020 /* Verify that all targets will be TARGET. Specifically, the
1021 edge that is not E must also go to TARGET. */
1022 || (EDGE_COUNT (src->succs) == 2
1023 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1024 return NULL;
5f0d2358 1025
6a66a8a7 1026 if (!onlyjump_p (insn))
6de9cd9a 1027 return NULL;
3348b696 1028 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
6de9cd9a 1029 return NULL;
ca6c03ca
JH
1030
1031 /* Avoid removing branch with side effects. */
1032 set = single_set (insn);
1033 if (!set || side_effects_p (set))
6de9cd9a 1034 return NULL;
ca6c03ca
JH
1035
1036 /* In case we zap a conditional jump, we'll need to kill
1037 the cc0 setter too. */
1038 kill_from = insn;
058eb3b0 1039 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
9caea4a7 1040 && only_sets_cc0_p (PREV_INSN (insn)))
ca6c03ca 1041 kill_from = PREV_INSN (insn);
ca6c03ca
JH
1042
1043 /* See if we can create the fallthru edge. */
bc35512f 1044 if (in_cfglayout || can_fallthru (src, target))
ca6c03ca 1045 {
c263766c
RH
1046 if (dump_file)
1047 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
ca6c03ca
JH
1048 fallthru = 1;
1049
eaec9b3d 1050 /* Selectively unlink whole insn chain. */
bc35512f
JH
1051 if (in_cfglayout)
1052 {
8879d71c 1053 rtx_insn *insn = BB_FOOTER (src);
bc35512f 1054
a7b87f73 1055 delete_insn_chain (kill_from, BB_END (src), false);
bc35512f
JH
1056
1057 /* Remove barriers but keep jumptables. */
1058 while (insn)
1059 {
4b4bf941 1060 if (BARRIER_P (insn))
bc35512f
JH
1061 {
1062 if (PREV_INSN (insn))
0f82e5c9 1063 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
bc35512f 1064 else
d8ce2eae 1065 BB_FOOTER (src) = NEXT_INSN (insn);
bc35512f 1066 if (NEXT_INSN (insn))
0f82e5c9 1067 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
bc35512f 1068 }
4b4bf941 1069 if (LABEL_P (insn))
bc35512f
JH
1070 break;
1071 insn = NEXT_INSN (insn);
1072 }
1073 }
1074 else
a7b87f73
ZD
1075 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1076 false);
ca6c03ca 1077 }
5f0d2358 1078
ca6c03ca
JH
1079 /* If this already is simplejump, redirect it. */
1080 else if (simplejump_p (insn))
1081 {
1082 if (e->dest == target)
6de9cd9a 1083 return NULL;
c263766c
RH
1084 if (dump_file)
1085 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
0b17ab2f 1086 INSN_UID (insn), e->dest->index, target->index);
1476d1bd
MM
1087 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1088 block_label (target), 0))
6ee3c8e4 1089 {
fefa31b5 1090 gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
341c100f 1091 return NULL;
6ee3c8e4 1092 }
ca6c03ca 1093 }
5f0d2358 1094
6ee3c8e4 1095 /* Cannot do anything for target exit block. */
fefa31b5 1096 else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
6de9cd9a 1097 return NULL;
6ee3c8e4 1098
ca6c03ca
JH
1099 /* Or replace possibly complicated jump insn by simple jump insn. */
1100 else
1101 {
e67d1102 1102 rtx_code_label *target_label = block_label (target);
8879d71c 1103 rtx_insn *barrier;
dfe08bc4 1104 rtx_insn *label;
8942ee0f 1105 rtx_jump_table_data *table;
ca6c03ca 1106
ec4a505f 1107 emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
a813c111 1108 JUMP_LABEL (BB_END (src)) = target_label;
ca6c03ca 1109 LABEL_NUSES (target_label)++;
c263766c
RH
1110 if (dump_file)
1111 fprintf (dump_file, "Replacing insn %i by jump %i\n",
a813c111 1112 INSN_UID (insn), INSN_UID (BB_END (src)));
ca6c03ca 1113
4da2eb6b 1114
ba4807a0
LB
1115 delete_insn_chain (kill_from, insn, false);
1116
4da2eb6b
RH
1117 /* Recognize a tablejump that we are converting to a
1118 simple jump and remove its associated CODE_LABEL
1119 and ADDR_VEC or ADDR_DIFF_VEC. */
ba4807a0
LB
1120 if (tablejump_p (insn, &label, &table))
1121 delete_insn_chain (label, table, false);
eb5b8ad4 1122
a813c111 1123 barrier = next_nonnote_insn (BB_END (src));
4b4bf941 1124 if (!barrier || !BARRIER_P (barrier))
a813c111 1125 emit_barrier_after (BB_END (src));
5d693491
JZ
1126 else
1127 {
a813c111 1128 if (barrier != NEXT_INSN (BB_END (src)))
5d693491
JZ
1129 {
1130 /* Move the jump before barrier so that the notes
1131 which originally were or were created before jump table are
1132 inside the basic block. */
8879d71c 1133 rtx_insn *new_insn = BB_END (src);
5d693491 1134
6c3d0e31
SP
1135 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1136 PREV_INSN (barrier), src);
5d693491 1137
0f82e5c9
DM
1138 SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1139 SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
5d693491 1140
0f82e5c9
DM
1141 SET_NEXT_INSN (new_insn) = barrier;
1142 SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
5d693491 1143
0f82e5c9
DM
1144 SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1145 SET_PREV_INSN (barrier) = new_insn;
5d693491
JZ
1146 }
1147 }
ca6c03ca
JH
1148 }
1149
1150 /* Keep only one edge out and set proper flags. */
c5cbcccf 1151 if (!single_succ_p (src))
628f6a4e 1152 remove_edge (e);
c5cbcccf 1153 gcc_assert (single_succ_p (src));
628f6a4e 1154
c5cbcccf 1155 e = single_succ_edge (src);
ca6c03ca
JH
1156 if (fallthru)
1157 e->flags = EDGE_FALLTHRU;
1158 else
1159 e->flags = 0;
5f0d2358 1160
ca6c03ca
JH
1161 e->probability = REG_BR_PROB_BASE;
1162 e->count = src->count;
1163
ca6c03ca
JH
1164 if (e->dest != target)
1165 redirect_edge_succ (e, target);
6de9cd9a 1166 return e;
ca6c03ca
JH
1167}
1168
4e3825db
MM
1169/* Subroutine of redirect_branch_edge that tries to patch the jump
1170 instruction INSN so that it reaches block NEW. Do this
1171 only when it originally reached block OLD. Return true if this
1172 worked or the original target wasn't OLD, return false if redirection
1173 doesn't work. */
1174
1175static bool
8879d71c 1176patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
ca6c03ca 1177{
8942ee0f 1178 rtx_jump_table_data *table;
ca6c03ca 1179 rtx tmp;
ca6c03ca 1180 /* Recognize a tablejump and adjust all matching cases. */
8942ee0f 1181 if (tablejump_p (insn, NULL, &table))
ca6c03ca
JH
1182 {
1183 rtvec vec;
1184 int j;
e67d1102 1185 rtx_code_label *new_label = block_label (new_bb);
ca6c03ca 1186
fefa31b5 1187 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4e3825db 1188 return false;
95c43227 1189 vec = table->get_labels ();
ca6c03ca
JH
1190
1191 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1192 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1193 {
1194 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1195 --LABEL_NUSES (old_label);
1196 ++LABEL_NUSES (new_label);
1197 }
1198
f9da5064 1199 /* Handle casesi dispatch insns. */
ca6c03ca
JH
1200 if ((tmp = single_set (insn)) != NULL
1201 && SET_DEST (tmp) == pc_rtx
1202 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1203 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
04a121a7 1204 && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
ca6c03ca 1205 {
4c33cb26 1206 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
ca6c03ca
JH
1207 new_label);
1208 --LABEL_NUSES (old_label);
1209 ++LABEL_NUSES (new_label);
1210 }
1211 }
1c384bf1
RH
1212 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1213 {
1214 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
e67d1102 1215 rtx note;
1c384bf1 1216
fefa31b5 1217 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1c384bf1 1218 return false;
e67d1102 1219 rtx_code_label *new_label = block_label (new_bb);
1c384bf1
RH
1220
1221 for (i = 0; i < n; ++i)
1222 {
1223 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1224 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1225 if (XEXP (old_ref, 0) == old_label)
1226 {
1227 ASM_OPERANDS_LABEL (tmp, i)
1228 = gen_rtx_LABEL_REF (Pmode, new_label);
1229 --LABEL_NUSES (old_label);
1230 ++LABEL_NUSES (new_label);
1231 }
1232 }
1233
1234 if (JUMP_LABEL (insn) == old_label)
1235 {
1236 JUMP_LABEL (insn) = new_label;
1237 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1238 if (note)
1239 remove_note (insn, note);
1240 }
1241 else
1242 {
1243 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1244 if (note)
1245 remove_note (insn, note);
1246 if (JUMP_LABEL (insn) != new_label
1247 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1248 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1249 }
3b5fda81
JJ
1250 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1251 != NULL_RTX)
1252 XEXP (note, 0) = new_label;
1c384bf1 1253 }
ca6c03ca
JH
1254 else
1255 {
1256 /* ?? We may play the games with moving the named labels from
1257 one basic block to the other in case only one computed_jump is
1258 available. */
5f0d2358
RK
1259 if (computed_jump_p (insn)
1260 /* A return instruction can't be redirected. */
1261 || returnjump_p (insn))
4e3825db 1262 return false;
6ee3c8e4 1263
4e3825db 1264 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
6ee3c8e4 1265 {
4e3825db
MM
1266 /* If the insn doesn't go where we think, we're confused. */
1267 gcc_assert (JUMP_LABEL (insn) == old_label);
1268
1269 /* If the substitution doesn't succeed, die. This can happen
1270 if the back end emitted unrecognizable instructions or if
1271 target is exit block on some arches. */
1476d1bd
MM
1272 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1273 block_label (new_bb), 0))
4e3825db 1274 {
fefa31b5 1275 gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
4e3825db
MM
1276 return false;
1277 }
6ee3c8e4 1278 }
ca6c03ca 1279 }
4e3825db
MM
1280 return true;
1281}
1282
1283
1284/* Redirect edge representing branch of (un)conditional jump or tablejump,
1285 NULL on failure */
1286static edge
1287redirect_branch_edge (edge e, basic_block target)
1288{
8879d71c 1289 rtx_insn *old_label = BB_HEAD (e->dest);
4e3825db 1290 basic_block src = e->src;
8879d71c 1291 rtx_insn *insn = BB_END (src);
4e3825db
MM
1292
1293 /* We can only redirect non-fallthru edges of jump insn. */
1294 if (e->flags & EDGE_FALLTHRU)
1295 return NULL;
1296 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1297 return NULL;
1298
1299 if (!currently_expanding_to_rtl)
1300 {
1476d1bd 1301 if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
4e3825db
MM
1302 return NULL;
1303 }
1304 else
1305 /* When expanding this BB might actually contain multiple
1306 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1307 Redirect all of those that match our label. */
0f346928 1308 FOR_BB_INSNS (src, insn)
1476d1bd
MM
1309 if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
1310 old_label, target))
4e3825db 1311 return NULL;
ca6c03ca 1312
c263766c
RH
1313 if (dump_file)
1314 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
0b17ab2f 1315 e->src->index, e->dest->index, target->index);
5f0d2358 1316
ca6c03ca 1317 if (e->dest != target)
6de9cd9a 1318 e = redirect_edge_succ_nodup (e, target);
6fb5fa3c 1319
6de9cd9a 1320 return e;
bc35512f
JH
1321}
1322
3371a64f
TJ
1323/* Called when edge E has been redirected to a new destination,
1324 in order to update the region crossing flag on the edge and
1325 jump. */
1326
1327static void
1328fixup_partition_crossing (edge e)
1329{
fefa31b5
DM
1330 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1331 == EXIT_BLOCK_PTR_FOR_FN (cfun))
3371a64f
TJ
1332 return;
1333 /* If we redirected an existing edge, it may already be marked
1334 crossing, even though the new src is missing a reg crossing note.
1335 But make sure reg crossing note doesn't already exist before
1336 inserting. */
1337 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1338 {
1339 e->flags |= EDGE_CROSSING;
3371a64f 1340 if (JUMP_P (BB_END (e->src))
339ba33b
RS
1341 && !CROSSING_JUMP_P (BB_END (e->src)))
1342 CROSSING_JUMP_P (BB_END (e->src)) = 1;
3371a64f
TJ
1343 }
1344 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1345 {
1346 e->flags &= ~EDGE_CROSSING;
1347 /* Remove the section crossing note from jump at end of
1348 src if it exists, and if no other successors are
1349 still crossing. */
339ba33b 1350 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
3371a64f
TJ
1351 {
1352 bool has_crossing_succ = false;
1353 edge e2;
1354 edge_iterator ei;
1355 FOR_EACH_EDGE (e2, ei, e->src->succs)
1356 {
1357 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1358 if (has_crossing_succ)
1359 break;
1360 }
1361 if (!has_crossing_succ)
339ba33b 1362 CROSSING_JUMP_P (BB_END (e->src)) = 0;
3371a64f
TJ
1363 }
1364 }
1365}
1366
600b5b1d
TJ
1367/* Called when block BB has been reassigned to the cold partition,
1368 because it is now dominated by another cold block,
1369 to ensure that the region crossing attributes are updated. */
1370
1371static void
1372fixup_new_cold_bb (basic_block bb)
1373{
1374 edge e;
1375 edge_iterator ei;
1376
1377 /* This is called when a hot bb is found to now be dominated
1378 by a cold bb and therefore needs to become cold. Therefore,
1379 its preds will no longer be region crossing. Any non-dominating
1380 preds that were previously hot would also have become cold
1381 in the caller for the same region. Any preds that were previously
1382 region-crossing will be adjusted in fixup_partition_crossing. */
1383 FOR_EACH_EDGE (e, ei, bb->preds)
1384 {
1385 fixup_partition_crossing (e);
1386 }
1387
1388 /* Possibly need to make bb's successor edges region crossing,
1389 or remove stale region crossing. */
1390 FOR_EACH_EDGE (e, ei, bb->succs)
1391 {
1392 /* We can't have fall-through edges across partition boundaries.
1393 Note that force_nonfallthru will do any necessary partition
1394 boundary fixup by calling fixup_partition_crossing itself. */
1395 if ((e->flags & EDGE_FALLTHRU)
1396 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
fefa31b5 1397 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
600b5b1d
TJ
1398 force_nonfallthru (e);
1399 else
1400 fixup_partition_crossing (e);
1401 }
1402}
1403
bc35512f
JH
1404/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1405 expense of adding new instructions or reordering basic blocks.
1406
1407 Function can be also called with edge destination equivalent to the TARGET.
1408 Then it should try the simplifications and do nothing if none is possible.
1409
6de9cd9a
DN
1410 Return edge representing the branch if transformation succeeded. Return NULL
1411 on failure.
1412 We still return NULL in case E already destinated TARGET and we didn't
1413 managed to simplify instruction stream. */
bc35512f 1414
6de9cd9a 1415static edge
5671bf27 1416rtl_redirect_edge_and_branch (edge e, basic_block target)
bc35512f 1417{
6de9cd9a 1418 edge ret;
f345f21a 1419 basic_block src = e->src;
3371a64f 1420 basic_block dest = e->dest;
f345f21a 1421
bc35512f 1422 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 1423 return NULL;
bc35512f 1424
3371a64f 1425 if (dest == target)
6de9cd9a 1426 return e;
3348b696 1427
6de9cd9a 1428 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
f345f21a 1429 {
6fb5fa3c 1430 df_set_bb_dirty (src);
3371a64f 1431 fixup_partition_crossing (ret);
6de9cd9a 1432 return ret;
f345f21a 1433 }
bc35512f 1434
6de9cd9a
DN
1435 ret = redirect_branch_edge (e, target);
1436 if (!ret)
1437 return NULL;
5f0d2358 1438
6fb5fa3c 1439 df_set_bb_dirty (src);
3371a64f 1440 fixup_partition_crossing (ret);
6de9cd9a 1441 return ret;
ca6c03ca
JH
1442}
1443
3371a64f
TJ
1444/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1445
1446void
1447emit_barrier_after_bb (basic_block bb)
1448{
cd459bf8 1449 rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
c3284718 1450 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
3371a64f
TJ
1451 || current_ir_type () == IR_RTL_CFGLAYOUT);
1452 if (current_ir_type () == IR_RTL_CFGLAYOUT)
0fb3402f
RX
1453 {
1454 rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1455
1456 if (BB_FOOTER (bb))
1457 {
1458 rtx_insn *footer_tail = BB_FOOTER (bb);
1459
1460 while (NEXT_INSN (footer_tail))
1461 footer_tail = NEXT_INSN (footer_tail);
1462 if (!BARRIER_P (footer_tail))
1463 {
1464 SET_NEXT_INSN (footer_tail) = insn;
1465 SET_PREV_INSN (insn) = footer_tail;
1466 }
1467 }
1468 else
1469 BB_FOOTER (bb) = insn;
1470 }
3371a64f
TJ
1471}
1472
4fe9b91c 1473/* Like force_nonfallthru below, but additionally performs redirection
26898771
BS
1474 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1475 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1476 simple_return_rtx, indicating which kind of returnjump to create.
1477 It should be NULL otherwise. */
ca6c03ca 1478
dc0ff1c8 1479basic_block
26898771 1480force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
ca6c03ca 1481{
a3716585 1482 basic_block jump_block, new_bb = NULL, src = e->src;
ca6c03ca
JH
1483 rtx note;
1484 edge new_edge;
a3716585 1485 int abnormal_edge_flags = 0;
a3e6a37b 1486 bool asm_goto_edge = false;
7241571e 1487 int loc;
ca6c03ca 1488
cb9a1d9b
JH
1489 /* In the case the last instruction is conditional jump to the next
1490 instruction, first redirect the jump itself and then continue
b20b352b 1491 by creating a basic block afterwards to redirect fallthru edge. */
fefa31b5
DM
1492 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1493 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
a813c111 1494 && any_condjump_p (BB_END (e->src))
a813c111 1495 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
cb9a1d9b
JH
1496 {
1497 rtx note;
58e6ae30 1498 edge b = unchecked_make_edge (e->src, target, 0);
341c100f 1499 bool redirected;
cb9a1d9b 1500
1476d1bd
MM
1501 redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1502 block_label (target), 0);
341c100f 1503 gcc_assert (redirected);
c22cacf3 1504
a813c111 1505 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
cb9a1d9b
JH
1506 if (note)
1507 {
e5af9ddd 1508 int prob = XINT (note, 0);
cb9a1d9b
JH
1509
1510 b->probability = prob;
8ddb5a29 1511 /* Update this to use GCOV_COMPUTE_SCALE. */
cb9a1d9b
JH
1512 b->count = e->count * prob / REG_BR_PROB_BASE;
1513 e->probability -= e->probability;
1514 e->count -= b->count;
1515 if (e->probability < 0)
1516 e->probability = 0;
1517 if (e->count < 0)
1518 e->count = 0;
1519 }
1520 }
1521
ca6c03ca 1522 if (e->flags & EDGE_ABNORMAL)
a3716585
JH
1523 {
1524 /* Irritating special case - fallthru edge to the same block as abnormal
1525 edge.
1526 We can't redirect abnormal edge, but we still can split the fallthru
d329e058 1527 one and create separate abnormal edge to original destination.
a3716585 1528 This allows bb-reorder to make such edge non-fallthru. */
341c100f 1529 gcc_assert (e->dest == target);
ee44c28d
SB
1530 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1531 e->flags &= EDGE_FALLTHRU;
a3716585 1532 }
341c100f 1533 else
24c545ff 1534 {
341c100f 1535 gcc_assert (e->flags & EDGE_FALLTHRU);
fefa31b5 1536 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
341c100f
NS
1537 {
1538 /* We can't redirect the entry block. Create an empty block
628f6a4e
BE
1539 at the start of the function which we use to add the new
1540 jump. */
1541 edge tmp;
1542 edge_iterator ei;
1543 bool found = false;
c22cacf3 1544
fefa31b5
DM
1545 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1546 ENTRY_BLOCK_PTR_FOR_FN (cfun));
c22cacf3 1547
341c100f
NS
1548 /* Change the existing edge's source to be the new block, and add
1549 a new edge from the entry block to the new block. */
1550 e->src = bb;
fefa31b5
DM
1551 for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1552 (tmp = ei_safe_edge (ei)); )
628f6a4e
BE
1553 {
1554 if (tmp == e)
1555 {
fefa31b5 1556 ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
628f6a4e
BE
1557 found = true;
1558 break;
1559 }
1560 else
1561 ei_next (&ei);
1562 }
c22cacf3 1563
628f6a4e 1564 gcc_assert (found);
c22cacf3 1565
9771b263 1566 vec_safe_push (bb->succs, e);
fefa31b5
DM
1567 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1568 EDGE_FALLTHRU);
341c100f 1569 }
24c545ff
BS
1570 }
1571
a3e6a37b 1572 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
891ca07d 1573 don't point to the target or fallthru label. */
a3e6a37b 1574 if (JUMP_P (BB_END (e->src))
fefa31b5 1575 && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
a3e6a37b
JJ
1576 && (e->flags & EDGE_FALLTHRU)
1577 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
ca6c03ca 1578 {
a3e6a37b 1579 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
5d68b7e6 1580 bool adjust_jump_target = false;
a3e6a37b
JJ
1581
1582 for (i = 0; i < n; ++i)
891ca07d
JJ
1583 {
1584 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
5d68b7e6
JJ
1585 {
1586 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1587 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1588 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1589 adjust_jump_target = true;
1590 }
891ca07d 1591 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
a3e6a37b 1592 asm_goto_edge = true;
891ca07d 1593 }
5d68b7e6
JJ
1594 if (adjust_jump_target)
1595 {
8879d71c
DM
1596 rtx_insn *insn = BB_END (e->src);
1597 rtx note;
1598 rtx_insn *old_label = BB_HEAD (e->dest);
1599 rtx_insn *new_label = BB_HEAD (target);
5d68b7e6
JJ
1600
1601 if (JUMP_LABEL (insn) == old_label)
1602 {
1603 JUMP_LABEL (insn) = new_label;
1604 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1605 if (note)
1606 remove_note (insn, note);
1607 }
1608 else
1609 {
1610 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1611 if (note)
1612 remove_note (insn, note);
1613 if (JUMP_LABEL (insn) != new_label
1614 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1615 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1616 }
1617 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1618 != NULL_RTX)
1619 XEXP (note, 0) = new_label;
1620 }
a3e6a37b
JJ
1621 }
1622
1623 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1624 {
dc01c3d1 1625 rtx_insn *new_head;
a3e6a37b
JJ
1626 gcov_type count = e->count;
1627 int probability = e->probability;
ca6c03ca 1628 /* Create the new structures. */
31a78298 1629
79019985
RH
1630 /* If the old block ended with a tablejump, skip its table
1631 by searching forward from there. Otherwise start searching
1632 forward from the last instruction of the old block. */
8942ee0f
DM
1633 rtx_jump_table_data *table;
1634 if (tablejump_p (BB_END (e->src), NULL, &table))
dc01c3d1 1635 new_head = table;
8942ee0f 1636 else
dc01c3d1
DM
1637 new_head = BB_END (e->src);
1638 new_head = NEXT_INSN (new_head);
31a78298 1639
dc01c3d1 1640 jump_block = create_basic_block (new_head, NULL, e->src);
a3e6a37b 1641 jump_block->count = count;
ca6c03ca 1642 jump_block->frequency = EDGE_FREQUENCY (e);
ca6c03ca 1643
750054a2
CT
1644 /* Make sure new block ends up in correct hot/cold section. */
1645
076c7ab8 1646 BB_COPY_PARTITION (jump_block, e->src);
c22cacf3 1647
ca6c03ca
JH
1648 /* Wire edge in. */
1649 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
a3e6a37b
JJ
1650 new_edge->probability = probability;
1651 new_edge->count = count;
ca6c03ca
JH
1652
1653 /* Redirect old edge. */
1654 redirect_edge_pred (e, jump_block);
1655 e->probability = REG_BR_PROB_BASE;
1656
3371a64f
TJ
1657 /* If e->src was previously region crossing, it no longer is
1658 and the reg crossing note should be removed. */
1659 fixup_partition_crossing (new_edge);
1660
a3e6a37b
JJ
1661 /* If asm goto has any label refs to target's label,
1662 add also edge from asm goto bb to target. */
1663 if (asm_goto_edge)
1664 {
1665 new_edge->probability /= 2;
1666 new_edge->count /= 2;
1667 jump_block->count /= 2;
1668 jump_block->frequency /= 2;
1669 new_edge = make_edge (new_edge->src, target,
1670 e->flags & ~EDGE_FALLTHRU);
1671 new_edge->probability = probability - probability / 2;
1672 new_edge->count = count - count / 2;
1673 }
1674
ca6c03ca
JH
1675 new_bb = jump_block;
1676 }
1677 else
1678 jump_block = e->src;
5f0d2358 1679
2f13f2de 1680 loc = e->goto_locus;
ca6c03ca 1681 e->flags &= ~EDGE_FALLTHRU;
fefa31b5 1682 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
ca6c03ca 1683 {
26898771 1684 if (jump_label == ret_rtx)
4bccb39e
RS
1685 emit_jump_insn_after_setloc (targetm.gen_return (),
1686 BB_END (jump_block), loc);
26898771
BS
1687 else
1688 {
1689 gcc_assert (jump_label == simple_return_rtx);
4bccb39e 1690 emit_jump_insn_after_setloc (targetm.gen_simple_return (),
26898771 1691 BB_END (jump_block), loc);
26898771 1692 }
387748de 1693 set_return_jump_label (BB_END (jump_block));
ca6c03ca
JH
1694 }
1695 else
1696 {
e67d1102 1697 rtx_code_label *label = block_label (target);
ec4a505f
RS
1698 emit_jump_insn_after_setloc (targetm.gen_jump (label),
1699 BB_END (jump_block), loc);
a813c111 1700 JUMP_LABEL (BB_END (jump_block)) = label;
ca6c03ca
JH
1701 LABEL_NUSES (label)++;
1702 }
5f0d2358 1703
3371a64f
TJ
1704 /* We might be in cfg layout mode, and if so, the following routine will
1705 insert the barrier correctly. */
1706 emit_barrier_after_bb (jump_block);
ca6c03ca
JH
1707 redirect_edge_succ_nodup (e, target);
1708
a3716585
JH
1709 if (abnormal_edge_flags)
1710 make_edge (src, target, abnormal_edge_flags);
1711
b8698a0f 1712 df_mark_solutions_dirty ();
3371a64f 1713 fixup_partition_crossing (e);
ca6c03ca
JH
1714 return new_bb;
1715}
1716
1717/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1718 (and possibly create new basic block) to make edge non-fallthru.
1719 Return newly created BB or NULL if none. */
5f0d2358 1720
cf103ca4
EB
1721static basic_block
1722rtl_force_nonfallthru (edge e)
ca6c03ca 1723{
26898771 1724 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
ca6c03ca
JH
1725}
1726
1727/* Redirect edge even at the expense of creating new jump insn or
1728 basic block. Return new basic block if created, NULL otherwise.
41806d92 1729 Conversion must be possible. */
ca6c03ca 1730
9ee634e3 1731static basic_block
d329e058 1732rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1733{
5f0d2358
RK
1734 if (redirect_edge_and_branch (e, target)
1735 || e->dest == target)
ca6c03ca
JH
1736 return NULL;
1737
1738 /* In case the edge redirection failed, try to force it to be non-fallthru
1739 and redirect newly created simplejump. */
6fb5fa3c 1740 df_set_bb_dirty (e->src);
26898771 1741 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1742}
1743
1744/* The given edge should potentially be a fallthru edge. If that is in
1745 fact true, delete the jump and barriers that are in the way. */
1746
f470c378
ZD
1747static void
1748rtl_tidy_fallthru_edge (edge e)
ca6c03ca 1749{
8879d71c 1750 rtx_insn *q;
f470c378
ZD
1751 basic_block b = e->src, c = b->next_bb;
1752
ca6c03ca
JH
1753 /* ??? In a late-running flow pass, other folks may have deleted basic
1754 blocks by nopping out blocks, leaving multiple BARRIERs between here
0fa2e4df 1755 and the target label. They ought to be chastised and fixed.
ca6c03ca
JH
1756
1757 We can also wind up with a sequence of undeletable labels between
1758 one block and the next.
1759
1760 So search through a sequence of barriers, labels, and notes for
1761 the head of block C and assert that we really do fall through. */
1762
a813c111 1763 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
9c0a0632
RH
1764 if (INSN_P (q))
1765 return;
ca6c03ca
JH
1766
1767 /* Remove what will soon cease being the jump insn from the source block.
1768 If block B consisted only of this single jump, turn it into a deleted
1769 note. */
a813c111 1770 q = BB_END (b);
4b4bf941 1771 if (JUMP_P (q)
ca6c03ca
JH
1772 && onlyjump_p (q)
1773 && (any_uncondjump_p (q)
c5cbcccf 1774 || single_succ_p (b)))
ca6c03ca 1775 {
dfe08bc4 1776 rtx_insn *label;
adfac8df
JJ
1777 rtx_jump_table_data *table;
1778
1779 if (tablejump_p (q, &label, &table))
1780 {
1781 /* The label is likely mentioned in some instruction before
1782 the tablejump and might not be DCEd, so turn it into
1783 a note instead and move before the tablejump that is going to
1784 be deleted. */
1785 const char *name = LABEL_NAME (label);
1786 PUT_CODE (label, NOTE);
1787 NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1788 NOTE_DELETED_LABEL_NAME (label) = name;
dfe08bc4 1789 reorder_insns (label, label, PREV_INSN (q));
adfac8df
JJ
1790 delete_insn (table);
1791 }
1792
ca6c03ca
JH
1793 /* If this was a conditional jump, we need to also delete
1794 the insn that set cc0. */
058eb3b0 1795 if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
ca6c03ca 1796 q = PREV_INSN (q);
ca6c03ca
JH
1797
1798 q = PREV_INSN (q);
ca6c03ca
JH
1799 }
1800
1801 /* Selectively unlink the sequence. */
a813c111 1802 if (q != PREV_INSN (BB_HEAD (c)))
a7b87f73 1803 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
ca6c03ca
JH
1804
1805 e->flags |= EDGE_FALLTHRU;
1806}
ca6c03ca 1807\f
f470c378
ZD
1808/* Should move basic block BB after basic block AFTER. NIY. */
1809
1810static bool
1811rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1812 basic_block after ATTRIBUTE_UNUSED)
1813{
1814 return false;
1815}
1816
3371a64f
TJ
/* Locate the last bb in the same partition as START_BB.  Walks the bb
   chain forward from START_BB and returns the last block before the
   partition changes; if no change is found before the exit block,
   returns the block just before the exit block.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      /* BB is the last block of the partition if its successor in the
	 chain belongs to a different partition.  */
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}
1831
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = get_last_bb_insn (after);
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1946
1947/* Queue instructions for insertion on an edge between two basic blocks.
1948 The new instructions and basic blocks (if any) will not appear in the
1949 CFG until commit_edge_insertions is called. */
1950
1951void
d329e058 1952insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1953{
1954 /* We cannot insert instructions on an abnormal critical edge.
1955 It will be easier to find the culprit if we die now. */
341c100f 1956 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1957
6de9cd9a 1958 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1959 start_sequence ();
1960 else
6de9cd9a 1961 push_to_sequence (e->insns.r);
ca6c03ca
JH
1962
1963 emit_insn (pattern);
1964
6de9cd9a 1965 e->insns.r = get_insns ();
ca6c03ca
JH
1966 end_sequence ();
1967}
1968
/* Update the CFG for the instructions queued on edge E.  Takes the
   pending sequence off E, chooses an insertion point (destination head,
   source end, or a freshly split edge), and emits the insns there.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
2077
/* Update the CFG for all queued instructions: walk every block from
   entry to exit and commit any sequence pending on its successor
   edges.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks.  This will cause the verification below to fail,
     and lead to now cold code in the hot section.  In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  checking_verify_flow_info ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      /* Only edges that had insns queued via insert_insn_on_edge have
	 a non-NULL pending sequence.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}
2106\f
6fb5fa3c 2107
f470c378 2108/* Print out RTL-specific basic block information (live information
a315c44c
SB
2109 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2110 documented in dumpfile.h. */
ca6c03ca 2111
10e9fecc 2112static void
a315c44c 2113rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
ca6c03ca 2114{
8879d71c
DM
2115 rtx_insn *insn;
2116 rtx_insn *last;
f470c378 2117 char *s_indent;
ca6c03ca 2118
ae50c0cb 2119 s_indent = (char *) alloca ((size_t) indent + 1);
400e39e3 2120 memset (s_indent, ' ', (size_t) indent);
f470c378 2121 s_indent[indent] = '\0';
b8698a0f 2122
a315c44c 2123 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2124 {
2125 df_dump_top (bb, outf);
2126 putc ('\n', outf);
2127 }
ca6c03ca 2128
68cc3174
EB
2129 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2130 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2131 insn = NEXT_INSN (insn))
a315c44c 2132 {
7b19209f
SB
2133 if (flags & TDF_DETAILS)
2134 df_dump_insn_top (insn, outf);
a315c44c
SB
2135 if (! (flags & TDF_SLIM))
2136 print_rtl_single (outf, insn);
2137 else
2138 dump_insn_slim (outf, insn);
7b19209f
SB
2139 if (flags & TDF_DETAILS)
2140 df_dump_insn_bottom (insn, outf);
a315c44c
SB
2141 }
2142
2143 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2144 {
2145 df_dump_bottom (bb, outf);
2146 putc ('\n', outf);
2147 }
2148
ca6c03ca
JH
2149}
2150\f
c4669594
SB
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: block starting/ending at that insn, and
	 membership state for the "not within"/"multiple bbs" notes.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  /* First pass: record block boundaries and how many blocks
	     each insn appears in.  */
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: walk the insn chain, printing each insn with
	 block headers/footers where recorded.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2248\f
532aafad
SB
2249/* Update the branch probability of BB if a REG_BR_PROB is present. */
2250
b446e5a2 2251void
d329e058 2252update_br_prob_note (basic_block bb)
b446e5a2
JH
2253{
2254 rtx note;
4b4bf941 2255 if (!JUMP_P (BB_END (bb)))
b446e5a2 2256 return;
a813c111 2257 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
e5af9ddd 2258 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
b446e5a2 2259 return;
e5af9ddd 2260 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
b446e5a2 2261}
96370780
MK
2262
2263/* Get the last insn associated with block BB (that includes barriers and
2264 tablejumps after BB). */
636eb204 2265rtx_insn *
96370780
MK
2266get_last_bb_insn (basic_block bb)
2267{
8942ee0f 2268 rtx_jump_table_data *table;
636eb204
DM
2269 rtx_insn *tmp;
2270 rtx_insn *end = BB_END (bb);
96370780
MK
2271
2272 /* Include any jump table following the basic block. */
8942ee0f
DM
2273 if (tablejump_p (end, NULL, &table))
2274 end = table;
96370780
MK
2275
2276 /* Include any barriers that may follow the basic block. */
1e211590 2277 tmp = next_nonnote_insn_bb (end);
96370780
MK
2278 while (tmp && BARRIER_P (tmp))
2279 {
2280 end = tmp;
1e211590 2281 tmp = next_nonnote_insn_bb (end);
96370780
MK
2282 }
2283
2284 return end;
2285}
af205f67 2286
600b5b1d
TJ
/* Sanity check partition hotness to ensure that basic blocks in
   the cold partition don't dominate basic blocks in the hot partition.
   If FLAG_ONLY is true, report violations as errors.  Otherwise
   re-mark the dominated blocks as cold, since this is run after
   cfg optimizations that may make hot blocks previously reached
   by both hot and cold blocks now only reachable along cold paths.
   Returns the vector of blocks found to violate the invariant
   (empty if there are no cold blocks at all).  */

static vec<basic_block>
find_partition_fixes (bool flag_only)
{
  basic_block bb;
  vec<basic_block> bbs_in_cold_partition = vNULL;
  vec<basic_block> bbs_to_fix = vNULL;

  /* Callers check this.  */
  gcc_checking_assert (crtl->has_bb_partition);

  /* Seed the worklist with every block currently marked cold.  */
  FOR_EACH_BB_FN (bb, cfun)
    if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
      bbs_in_cold_partition.safe_push (bb);

  if (bbs_in_cold_partition.is_empty ())
    return vNULL;

  /* Dominators may or may not be up to date; compute them here if
     needed and free them again below.  */
  bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);

  if (dom_calculated_here)
    calculate_dominance_info (CDI_DOMINATORS);

  while (! bbs_in_cold_partition.is_empty ())
    {
      bb = bbs_in_cold_partition.pop ();
      /* Any blocks dominated by a block in the cold section
	 must also be cold.  */
      basic_block son;
      for (son = first_dom_son (CDI_DOMINATORS, bb);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	{
	  /* If son is not yet cold, then mark it cold here and
	     enqueue it for further processing.  */
	  if ((BB_PARTITION (son) != BB_COLD_PARTITION))
	    {
	      if (flag_only)
		error ("non-cold basic block %d dominated "
		       "by a block in the cold partition (%d)", son->index, bb->index);
	      else
		BB_SET_PARTITION (son, BB_COLD_PARTITION);
	      bbs_to_fix.safe_push (son);
	      bbs_in_cold_partition.safe_push (son);
	    }
	}
    }

  if (dom_calculated_here)
    free_dominance_info (CDI_DOMINATORS);

  return bbs_to_fix;
}
2346
2347/* Perform cleanup on the hot/cold bb partitioning after optimization
2348 passes that modify the cfg. */
2349
2350void
2351fixup_partitions (void)
2352{
2353 basic_block bb;
2354
2355 if (!crtl->has_bb_partition)
2356 return;
2357
2358 /* Delete any blocks that became unreachable and weren't
2359 already cleaned up, for example during edge forwarding
2360 and convert_jumps_to_returns. This will expose more
2361 opportunities for fixing the partition boundaries here.
2362 Also, the calculation of the dominance graph during verification
2363 will assert if there are unreachable nodes. */
2364 delete_unreachable_blocks ();
2365
2366 /* If there are partitions, do a sanity check on them: A basic block in
2367   a cold partition cannot dominate a basic block in a hot partition.
2368 Fixup any that now violate this requirement, as a result of edge
2369 forwarding and unreachable block deletion.  */
2370 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2371
2372 /* Do the partition fixup after all necessary blocks have been converted to
2373 cold, so that we only update the region crossings the minimum number of
2374 places, which can require forcing edges to be non fallthru. */
2375 while (! bbs_to_fix.is_empty ())
2376 {
2377 bb = bbs_to_fix.pop ();
2378 fixup_new_cold_bb (bb);
2379 }
2380}
2381
af205f67
TJ
2382/* Verify, in the basic block chain, that there is at most one switch
2383 between hot/cold partitions. This condition will not be true until
2384 after reorder_basic_blocks is called. */
2385
251a41b9 2386static int
af205f67
TJ
2387verify_hot_cold_block_grouping (void)
2388{
2389 basic_block bb;
2390 int err = 0;
2391 bool switched_sections = false;
2392 int current_partition = BB_UNPARTITIONED;
2393
3371a64f
TJ
2394 /* Even after bb reordering is complete, we go into cfglayout mode
2395 again (in compgoto). Ensure we don't call this before going back
2396 into linearized RTL when any layout fixes would have been committed. */
2397 if (!crtl->bb_reorder_complete
c3284718 2398 || current_ir_type () != IR_RTL_CFGRTL)
251a41b9 2399 return err;
af205f67 2400
11cd3bed 2401 FOR_EACH_BB_FN (bb, cfun)
af205f67
TJ
2402 {
2403 if (current_partition != BB_UNPARTITIONED
2404 && BB_PARTITION (bb) != current_partition)
2405 {
2406 if (switched_sections)
2407 {
2408 error ("multiple hot/cold transitions found (bb %i)",
2409 bb->index);
2410 err = 1;
2411 }
2412 else
2413 switched_sections = true;
2414
2415 if (!crtl->has_bb_partition)
2416 error ("partition found but function partition flag not set");
2417 }
2418 current_partition = BB_PARTITION (bb);
2419 }
2420
251a41b9 2421 return err;
af205f67 2422}
b446e5a2 2423\f
ca6c03ca 2424
251a41b9
TJ
/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.  Returns nonzero if any check
   failed (after reporting each failure with error ()).  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Counters classifying this block's outgoing edges.  */
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      /* Check that a conditional jump's REG_BR_PROB note agrees with
	 the probability recorded on the CFG's branch edge.  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
	      && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* An edge really crosses sections only if its endpoints are in
	     different partitions and neither end is entry/exit.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
	  has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* An edge with none of these "decoration" flags is a branch.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* A region-crossing jump with no crossing edge is also wrong.  */
      if (!has_crossing_edge
	  && JUMP_P (BB_END (bb))
	  && CROSSING_JUMP_P (BB_END (bb)))
	{
	  print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
	  error ("Region crossing jump across same section in bb %i",
		 bb->index);
	  err = 1;
	}

      /* Consistency checks between the edge counts gathered above and
	 the block's final insn.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2603
251a41b9
TJ
/* Checks on the instructions within blocks.  Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.
   Returns nonzero on (recoverable) failure; a control-flow insn inside
   a block aborts via fatal_insn.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A label that is also the block's last insn leaves no room
	     for the mandatory basic block note.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    /* A control-flow insn before BB_END means the block
	       boundaries are corrupt beyond repair.  */
	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
2667
/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately.  Insns inside a block must point back at
   that block; insns in cfglayout headers/footers must have a NULL block
   pointer (except barriers).  Returns nonzero on failure.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header and footer insns live outside the block proper, so
	 (barriers aside) they must not claim a basic block.  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2719
10e9fecc
JH
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  /* Run the sub-verifiers in order, accumulating their failure flags;
     the sequencing keeps diagnostics grouped per check.  */
  int failed = rtl_verify_bb_pointers ();

  failed |= rtl_verify_bb_insns ();

  failed |= rtl_verify_edges ();

  return failed;
}
2751
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.  Returns nonzero if any problem was found;
   problems are reported through error ().  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  /* Map from insn UID to the block it was seen in; used to detect insns
     that appear in more than one basic block.  Zero-initialized.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  /* Walk the blocks in reverse layout order while simultaneously walking
     the insn chain backwards from LAST_HEAD, so each insn is visited
     exactly once.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Resume scanning just before this block's head on the next
	 iteration.  */
      last_head = PREV_INSN (x);
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2840
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  Returns nonzero if any
   problem was found; some violations are fatal via fatal_insn.  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Hitting the end of the stream or the next block's
		 NOTE_INSN_BASIC_BLOCK before a barrier means the barrier
		 is missing.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  /* A fallthru edge must connect blocks that are adjacent in
	     layout order.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* Only notes and the like may sit between the two blocks of
	       a fallthru edge; a barrier or real insn there is fatal.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
2899
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  Hard violations
   are reported via internal_error/fatal_insn; returns the accumulated soft
   error count (currently always zero).  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      /* CURR_BB is NULL when we are between blocks; only a limited set
	 of insn kinds may appear there.  */
      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* A non-conditional return must be followed by a barrier.  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

  return err;
}
251a41b9
TJ
2968
2969/* Verify the CFG and RTL consistency common for both underlying RTL and
2970 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2971
2972 Currently it does following checks:
2973 - all checks of rtl_verify_flow_info_1
2974 - test head/end pointers
2975 - check that blocks are laid out in consecutive order
2976 - check that all insns are in the basic blocks
2977 (except the switch handling code, barriers and notes)
2978 - check that all returns are followed by barriers
600b5b1d
TJ
2979 - check that all fallthru edge points to the adjacent blocks
2980 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
2981
2982static int
2983rtl_verify_flow_info (void)
2984{
2985 int err = 0;
2986
2987 err |= rtl_verify_flow_info_1 ();
2988
2989 err |= rtl_verify_bb_insn_chain ();
2990
2991 err |= rtl_verify_fallthru ();
2992
2993 err |= rtl_verify_bb_layout ();
2994
3371a64f
TJ
2995 err |= verify_hot_cold_block_grouping ();
2996
251a41b9
TJ
2997 return err;
2998}
ca6c03ca 2999\f
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns and notes so the analysis below looks at
     the last "real" insn of the block.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = XINT (note, 0);
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  /* Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
3221
5f0d2358
RK
3222/* Search all basic blocks for potentially dead edges and purge them. Return
3223 true if some edge has been eliminated. */
ca6c03ca
JH
3224
3225bool
25cd19de 3226purge_all_dead_edges (void)
ca6c03ca 3227{
e0082a72 3228 int purged = false;
e0082a72 3229 basic_block bb;
473fb060 3230
11cd3bed 3231 FOR_EACH_BB_FN (bb, cfun)
473fb060 3232 {
e0082a72 3233 bool purged_here = purge_dead_edges (bb);
5f0d2358 3234
473fb060 3235 purged |= purged_here;
473fb060 3236 }
5f0d2358 3237
ca6c03ca
JH
3238 return purged;
3239}
9ee634e3 3240
ba5e9aca
EB
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the abnormal-edge source insn is no longer the
	 block's last insn, i.e. insns were emitted after it.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/trapping insn and move the
		 trailing insns onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
78bde837
SB
3329\f
3330/* Cut the insns from FIRST to LAST out of the insns stream. */
3331
a90c614a 3332rtx_insn *
9152e0aa 3333unlink_insn_chain (rtx_insn *first, rtx_insn *last)
78bde837 3334{
a90c614a
DM
3335 rtx_insn *prevfirst = PREV_INSN (first);
3336 rtx_insn *nextlast = NEXT_INSN (last);
78bde837 3337
0f82e5c9
DM
3338 SET_PREV_INSN (first) = NULL;
3339 SET_NEXT_INSN (last) = NULL;
78bde837 3340 if (prevfirst)
0f82e5c9 3341 SET_NEXT_INSN (prevfirst) = nextlast;
78bde837 3342 if (nextlast)
0f82e5c9 3343 SET_PREV_INSN (nextlast) = prevfirst;
78bde837
SB
3344 else
3345 set_last_insn (prevfirst);
3346 if (!prevfirst)
3347 set_first_insn (nextlast);
9152e0aa 3348 return first;
78bde837
SB
3349}
3350\f
3351/* Skip over inter-block insns occurring after BB which are typically
3352 associated with BB (e.g., barriers). If there are any such insns,
3353 we return the last one. Otherwise, we return the end of BB. */
3354
8879d71c 3355static rtx_insn *
78bde837
SB
3356skip_insns_after_block (basic_block bb)
3357{
8879d71c 3358 rtx_insn *insn, *last_insn, *next_head, *prev;
78bde837 3359
8879d71c 3360 next_head = NULL;
fefa31b5 3361 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3362 next_head = BB_HEAD (bb->next_bb);
3363
3364 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3365 {
3366 if (insn == next_head)
3367 break;
3368
3369 switch (GET_CODE (insn))
3370 {
3371 case BARRIER:
3372 last_insn = insn;
3373 continue;
3374
3375 case NOTE:
3376 switch (NOTE_KIND (insn))
3377 {
3378 case NOTE_INSN_BLOCK_END:
3379 gcc_unreachable ();
3380 continue;
3381 default:
3382 continue;
3383 break;
3384 }
3385 break;
3386
3387 case CODE_LABEL:
3388 if (NEXT_INSN (insn)
3389 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3390 {
3391 insn = NEXT_INSN (insn);
3392 last_insn = insn;
3393 continue;
3394 }
3395 break;
3396
3397 default:
3398 break;
3399 }
3400
3401 break;
3402 }
3403
3404 /* It is possible to hit contradictory sequence. For instance:
3405
3406 jump_insn
3407 NOTE_INSN_BLOCK_BEG
3408 barrier
3409
3410 Where barrier belongs to jump_insn, but the note does not. This can be
3411 created by removing the basic block originally following
3412 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3413
3414 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3415 {
3416 prev = PREV_INSN (insn);
3417 if (NOTE_P (insn))
3418 switch (NOTE_KIND (insn))
3419 {
3420 case NOTE_INSN_BLOCK_END:
3421 gcc_unreachable ();
3422 break;
3423 case NOTE_INSN_DELETED:
3424 case NOTE_INSN_DELETED_LABEL:
3425 case NOTE_INSN_DELETED_DEBUG_LABEL:
3426 continue;
3427 default:
3428 reorder_insns (insn, insn, last_insn);
3429 }
3430 }
3431
3432 return last_insn;
3433}
3434
3435/* Locate or create a label for a given basic block. */
3436
e67d1102 3437static rtx_insn *
78bde837
SB
3438label_for_bb (basic_block bb)
3439{
e67d1102 3440 rtx_insn *label = BB_HEAD (bb);
78bde837
SB
3441
3442 if (!LABEL_P (label))
3443 {
3444 if (dump_file)
3445 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3446
3447 label = block_label (bb);
3448 }
3449
3450 return label;
3451}
3452
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  The insns outside
   any block are detached from the chain and stashed in
   cfg_layout_function_header/footer and the per-block BB_HEADER/BB_FOOTER
   fields.  */

static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  /* Find the first NOTE_INSN_BASIC_BLOCK; everything before it becomes
     the function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      /* Anything between the previous block's effective end and this
	 block's head becomes this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers and the like become the block's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3496\f
27a4cd48
DM
/* Pass that switches the function into cfglayout mode (it provides
   PROP_cfglayout) by calling cfg_layout_initialize.  */

namespace {

const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    cfg_layout_initialize (0);
    return 0;
  }

}; // class pass_into_cfg_layout_mode

} // anon namespace

/* Factory function used by the pass manager.  */

rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3535
/* Pass that leaves cfglayout mode (it destroys PROP_cfglayout): it records
   the current block order in the bb->aux chain and calls
   cfg_layout_finalize.  */

namespace {

const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  /* Chain the blocks in their current layout order through bb->aux,
     which cfg_layout_finalize consumes.  */
  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace

/* Factory function used by the pass manager.  */

rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
78bde837
SB
3584\f
3585
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rebuild prev_bb/next_bb from the aux
     chain.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3659\f
3660
3661/* Given a reorder chain, rearrange the code to match. */
3662
3663static void
3664fixup_reorder_chain (void)
3665{
3666 basic_block bb;
9152e0aa 3667 rtx_insn *insn = NULL;
78bde837
SB
3668
3669 if (cfg_layout_function_header)
3670 {
3671 set_first_insn (cfg_layout_function_header);
3672 insn = cfg_layout_function_header;
3673 while (NEXT_INSN (insn))
3674 insn = NEXT_INSN (insn);
3675 }
3676
3677 /* First do the bulk reordering -- rechain the blocks without regard to
3678 the needed changes to jumps and labels. */
3679
fefa31b5
DM
3680 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3681 bb->aux)
78bde837
SB
3682 {
3683 if (BB_HEADER (bb))
3684 {
3685 if (insn)
0f82e5c9 3686 SET_NEXT_INSN (insn) = BB_HEADER (bb);
78bde837
SB
3687 else
3688 set_first_insn (BB_HEADER (bb));
0f82e5c9 3689 SET_PREV_INSN (BB_HEADER (bb)) = insn;
78bde837
SB
3690 insn = BB_HEADER (bb);
3691 while (NEXT_INSN (insn))
3692 insn = NEXT_INSN (insn);
3693 }
3694 if (insn)
0f82e5c9 3695 SET_NEXT_INSN (insn) = BB_HEAD (bb);
78bde837
SB
3696 else
3697 set_first_insn (BB_HEAD (bb));
0f82e5c9 3698 SET_PREV_INSN (BB_HEAD (bb)) = insn;
78bde837
SB
3699 insn = BB_END (bb);
3700 if (BB_FOOTER (bb))
3701 {
0f82e5c9
DM
3702 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3703 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
78bde837
SB
3704 while (NEXT_INSN (insn))
3705 insn = NEXT_INSN (insn);
3706 }
3707 }
3708
0f82e5c9 3709 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
78bde837 3710 if (cfg_layout_function_footer)
0f82e5c9 3711 SET_PREV_INSN (cfg_layout_function_footer) = insn;
78bde837
SB
3712
3713 while (NEXT_INSN (insn))
3714 insn = NEXT_INSN (insn);
3715
3716 set_last_insn (insn);
b2b29377
MM
3717 if (flag_checking)
3718 verify_insn_chain ();
78bde837
SB
3719
3720 /* Now add jumps and labels as needed to match the blocks new
29f3fd5b 3721 outgoing edges. */
78bde837 3722
29f3fd5b
SB
3723 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3724 bb->aux)
78bde837
SB
3725 {
3726 edge e_fall, e_taken, e;
8879d71c 3727 rtx_insn *bb_end_insn;
78bde837 3728 rtx ret_label = NULL_RTX;
3371a64f 3729 basic_block nb;
78bde837
SB
3730 edge_iterator ei;
3731
3732 if (EDGE_COUNT (bb->succs) == 0)
3733 continue;
3734
3735 /* Find the old fallthru edge, and another non-EH edge for
3736 a taken jump. */
3737 e_taken = e_fall = NULL;
3738
3739 FOR_EACH_EDGE (e, ei, bb->succs)
3740 if (e->flags & EDGE_FALLTHRU)
3741 e_fall = e;
3742 else if (! (e->flags & EDGE_EH))
3743 e_taken = e;
3744
3745 bb_end_insn = BB_END (bb);
1476d1bd 3746 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
78bde837 3747 {
1476d1bd
MM
3748 ret_label = JUMP_LABEL (bb_end_jump);
3749 if (any_condjump_p (bb_end_jump))
78bde837
SB
3750 {
3751 /* This might happen if the conditional jump has side
3752 effects and could therefore not be optimized away.
3753 Make the basic block to end with a barrier in order
3754 to prevent rtl_verify_flow_info from complaining. */
3755 if (!e_fall)
3756 {
1476d1bd
MM
3757 gcc_assert (!onlyjump_p (bb_end_jump)
3758 || returnjump_p (bb_end_jump)
91af97c3 3759 || (e_taken->flags & EDGE_CROSSING));
1476d1bd 3760 emit_barrier_after (bb_end_jump);
78bde837
SB
3761 continue;
3762 }
3763
3764 /* If the old fallthru is still next, nothing to do. */
3765 if (bb->aux == e_fall->dest
fefa31b5 3766 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3767 continue;
3768
3769 /* The degenerated case of conditional jump jumping to the next
3770 instruction can happen for jumps with side effects. We need
3771 to construct a forwarder block and this will be done just
3772 fine by force_nonfallthru below. */
3773 if (!e_taken)
3774 ;
3775
3776 /* There is another special case: if *neither* block is next,
3777 such as happens at the very end of a function, then we'll
3778 need to add a new unconditional jump. Choose the taken
3779 edge based on known or assumed probability. */
3780 else if (bb->aux != e_taken->dest)
3781 {
1476d1bd 3782 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
78bde837
SB
3783
3784 if (note
e5af9ddd 3785 && XINT (note, 0) < REG_BR_PROB_BASE / 2
1476d1bd 3786 && invert_jump (bb_end_jump,
fefa31b5
DM
3787 (e_fall->dest
3788 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3789 ? NULL_RTX
3790 : label_for_bb (e_fall->dest)), 0))
3791 {
3792 e_fall->flags &= ~EDGE_FALLTHRU;
3793 gcc_checking_assert (could_fall_through
3794 (e_taken->src, e_taken->dest));
3795 e_taken->flags |= EDGE_FALLTHRU;
3796 update_br_prob_note (bb);
3797 e = e_fall, e_fall = e_taken, e_taken = e;
3798 }
3799 }
3800
3801 /* If the "jumping" edge is a crossing edge, and the fall
3802 through edge is non-crossing, leave things as they are. */
3803 else if ((e_taken->flags & EDGE_CROSSING)
3804 && !(e_fall->flags & EDGE_CROSSING))
3805 continue;
3806
3807 /* Otherwise we can try to invert the jump. This will
3808 basically never fail, however, keep up the pretense. */
1476d1bd 3809 else if (invert_jump (bb_end_jump,
fefa31b5
DM
3810 (e_fall->dest
3811 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3812 ? NULL_RTX
3813 : label_for_bb (e_fall->dest)), 0))
3814 {
3815 e_fall->flags &= ~EDGE_FALLTHRU;
3816 gcc_checking_assert (could_fall_through
3817 (e_taken->src, e_taken->dest));
3818 e_taken->flags |= EDGE_FALLTHRU;
3819 update_br_prob_note (bb);
3820 if (LABEL_NUSES (ret_label) == 0
3821 && single_pred_p (e_taken->dest))
3822 delete_insn (ret_label);
3823 continue;
3824 }
3825 }
3826 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3827 {
3828 /* If the old fallthru is still next or if
3829 asm goto doesn't have a fallthru (e.g. when followed by
3830 __builtin_unreachable ()), nothing to do. */
3831 if (! e_fall
3832 || bb->aux == e_fall->dest
fefa31b5 3833 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3834 continue;
3835
3836 /* Otherwise we'll have to use the fallthru fixup below. */
3837 }
3838 else
3839 {
3840 /* Otherwise we have some return, switch or computed
3841 jump. In the 99% case, there should not have been a
3842 fallthru edge. */
3843 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3844 continue;
3845 }
3846 }
3847 else
3848 {
3849 /* No fallthru implies a noreturn function with EH edges, or
3850 something similarly bizarre. In any case, we don't need to
3851 do anything. */
3852 if (! e_fall)
3853 continue;
3854
3855 /* If the fallthru block is still next, nothing to do. */
3856 if (bb->aux == e_fall->dest)
3857 continue;
3858
3859 /* A fallthru to exit block. */
fefa31b5 3860 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3861 continue;
3862 }
3863
3864 /* We got here if we need to add a new jump insn.
3865 Note force_nonfallthru can delete E_FALL and thus we have to
3866 save E_FALL->src prior to the call to force_nonfallthru. */
78bde837
SB
3867 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3868 if (nb)
3869 {
3870 nb->aux = bb->aux;
3871 bb->aux = nb;
3872 /* Don't process this new block. */
3873 bb = nb;
78bde837
SB
3874 }
3875 }
3876
3877 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3878
29f3fd5b 3879 /* Annoying special case - jump around dead jumptables left in the code. */
11cd3bed 3880 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3881 {
3882 edge e = find_fallthru_edge (bb->succs);
3883
29f3fd5b
SB
3884 if (e && !can_fallthru (e->src, e->dest))
3885 force_nonfallthru (e);
78bde837
SB
3886 }
3887
3888 /* Ensure goto_locus from edges has some instructions with that locus
3889 in RTL. */
3890 if (!optimize)
11cd3bed 3891 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3892 {
3893 edge e;
3894 edge_iterator ei;
3895
3896 FOR_EACH_EDGE (e, ei, bb->succs)
2f13f2de 3897 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
5368224f 3898 && !(e->flags & EDGE_ABNORMAL))
78bde837
SB
3899 {
3900 edge e2;
3901 edge_iterator ei2;
3902 basic_block dest, nb;
8879d71c 3903 rtx_insn *end;
78bde837
SB
3904
3905 insn = BB_END (e->src);
3906 end = PREV_INSN (BB_HEAD (e->src));
3907 while (insn != end
5368224f 3908 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
78bde837
SB
3909 insn = PREV_INSN (insn);
3910 if (insn != end
5368224f 3911 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3912 continue;
3913 if (simplejump_p (BB_END (e->src))
5368224f 3914 && !INSN_HAS_LOCATION (BB_END (e->src)))
78bde837 3915 {
5368224f 3916 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
78bde837
SB
3917 continue;
3918 }
3919 dest = e->dest;
fefa31b5 3920 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3921 {
3922 /* Non-fallthru edges to the exit block cannot be split. */
3923 if (!(e->flags & EDGE_FALLTHRU))
3924 continue;
3925 }
3926 else
3927 {
3928 insn = BB_HEAD (dest);
3929 end = NEXT_INSN (BB_END (dest));
3930 while (insn != end && !NONDEBUG_INSN_P (insn))
3931 insn = NEXT_INSN (insn);
5368224f
DC
3932 if (insn != end && INSN_HAS_LOCATION (insn)
3933 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3934 continue;
3935 }
3936 nb = split_edge (e);
3937 if (!INSN_P (BB_END (nb)))
1130d5e3 3938 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
190bea87 3939 nb);
5368224f 3940 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
78bde837
SB
3941
3942 /* If there are other incoming edges to the destination block
3943 with the same goto locus, redirect them to the new block as
3944 well, this can prevent other such blocks from being created
3945 in subsequent iterations of the loop. */
3946 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
2f13f2de 3947 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
78bde837 3948 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
5368224f 3949 && e->goto_locus == e2->goto_locus)
78bde837
SB
3950 redirect_edge_and_branch (e2, nb);
3951 else
3952 ei_next (&ei2);
3953 }
3954 }
3955}
3956\f
3957/* Perform sanity checks on the insn chain.
3958 1. Check that next/prev pointers are consistent in both the forward and
3959 reverse direction.
3960 2. Count insns in chain, going both directions, and check if equal.
3961 3. Check that get_last_insn () returns the actual end of chain. */
3962
3963DEBUG_FUNCTION void
3964verify_insn_chain (void)
3965{
8879d71c 3966 rtx_insn *x, *prevx, *nextx;
78bde837
SB
3967 int insn_cnt1, insn_cnt2;
3968
3969 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3970 x != 0;
3971 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3972 gcc_assert (PREV_INSN (x) == prevx);
3973
3974 gcc_assert (prevx == get_last_insn ());
3975
3976 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3977 x != 0;
3978 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3979 gcc_assert (NEXT_INSN (x) == nextx);
3980
3981 gcc_assert (insn_cnt1 == insn_cnt2);
3982}
3983\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* BB is the block, if any, that falls through into the exit block.  */
  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* A non-NULL bb->aux means BB is not already last in the ->aux chain
     that encodes the intended linear block order.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block_after_labels (bb)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          /* The footer stays with the tail half of the split block.  */
          BB_FOOTER (bb) = BB_FOOTER (c);
          BB_FOOTER (c) = NULL;
        }

      /* Walk the ->aux chain to BB's predecessor and unlink BB.  */
      while (c->aux != bb)
        c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* Advance C to the current last block of the chain.  */
      while (c->aux)
        c = (basic_block) c->aux;

      /* Re-link BB as the final block, immediately before exit.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4028
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Look for at least two fallthru predecessors of the exit block;
     PREDECESSOR remembers the first one found.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
        if (predecessor == NULL)
          predecessor = e;
        else
          {
            more = true;
            break;
          }
      }

  /* Zero or one fallthru predecessor: nothing to do.  */
  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  /* Use ei_safe_edge because redirect_edge_and_branch_force mutates the
     predecessor list while we iterate over it.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
          || !(e->flags & EDGE_FALLTHRU))
        ei_next (&ei);
      else
        redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  A NULL ->aux marks the last block in the intended order.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bb->aux == NULL && bb != forwarder)
        {
          bb->aux = forwarder;
          break;
        }
    }
}
4079\f
4080/* Return true in case it is possible to duplicate the basic block BB. */
4081
4082static bool
4083cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4084{
4085 /* Do not attempt to duplicate tablejumps, as we need to unshare
4086 the dispatch table. This is difficult to do, as the instructions
4087 computing jump destination may be hoisted outside the basic block. */
4088 if (tablejump_p (BB_END (bb), NULL, NULL))
4089 return false;
4090
4091 /* Do not duplicate blocks containing insns that can't be copied. */
4092 if (targetm.cannot_copy_insn_p)
4093 {
8879d71c 4094 rtx_insn *insn = BB_HEAD (bb);
78bde837
SB
4095 while (1)
4096 {
4097 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4098 return false;
4099 if (insn == BB_END (bb))
4100 break;
4101 insn = NEXT_INSN (insn);
4102 }
4103 }
4104
4105 return true;
4106}
4107
/* Copy the insns in the inclusive chain [FROM, TO] to the end of the
   current insn stream and return the first insn of the copy.  The copied
   chain is expected to be re-linked into place by the caller.  */

rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case DEBUG_INSN:
          /* Don't duplicate label debug insns.  */
          if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
            break;
          /* FALLTHRU */
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          copy = emit_copy_of_insn_after (insn, get_last_insn ());
          /* Preserve (simple) return labels, which are shared objects
             rather than copied ones.  */
          if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
              && ANY_RETURN_P (JUMP_LABEL (insn)))
            JUMP_LABEL (copy) = JUMP_LABEL (insn);
          maybe_copy_prologue_epilogue_insn (insn, copy);
          break;

        case JUMP_TABLE_DATA:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.
             Avoid copying following barrier as well if any
             (and debug insns in between).  */
          for (next = NEXT_INSN (insn);
               next != NEXT_INSN (to);
               next = NEXT_INSN (next))
            if (!DEBUG_INSN_P (next))
              break;
          if (next != NEXT_INSN (to) && BARRIER_P (next))
            insn = next;
          break;

        case CODE_LABEL:
          /* Labels are not copied; jumps into the copy are expected to
             be rewired by the caller.  */
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
            /* In case prologue is empty and function contain label
               in first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
            case NOTE_INSN_DELETED_DEBUG_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just single entry to function.  */
            case NOTE_INSN_BASIC_BLOCK:
              /* We should only switch text sections once.  */
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              break;

            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
              emit_note_copy (as_a <rtx_note *> (insn));
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  /* The first real copy follows the placeholder note; drop the note and
     hand back the head of the copied chain.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
4197
/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx_insn *insn;
  basic_block new_bb;

  /* Copy BB's insns to the end of the stream and wrap them in a new
     block placed just before the exit block.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* The copy must live in the same hot/cold partition as the original.  */
  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Find the last insn of the header so the whole header chain can
         be duplicated, then unlink the copy into the new block.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
        BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Same treatment for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
        BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4234
4235\f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups.  As an
     example, if edge forwarding performed when optimizing the cfg
     layout required moving a block from the hot to the cold
     section.  This would create an illegal partitioning unless some
     manual fixup was performed.  */
  gcc_assert (!(crtl->bb_reorder_complete
                && flag_reorder_blocks_and_partition));

  initialize_original_copy_tables ();

  /* Install the cfglayout-mode variants of the CFG hooks.  */
  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4271
4272/* Splits superblocks. */
4273void
4274break_superblocks (void)
4275{
78bde837
SB
4276 bool need = false;
4277 basic_block bb;
4278
7ba9e72d 4279 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
f61e445a 4280 bitmap_clear (superblocks);
78bde837 4281
11cd3bed 4282 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4283 if (bb->flags & BB_SUPERBLOCK)
4284 {
4285 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4286 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4287 need = true;
4288 }
4289
4290 if (need)
4291 {
4292 rebuild_jump_labels (get_insns ());
4293 find_many_sub_basic_blocks (superblocks);
4294 }
78bde837
SB
4295}
4296
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  checking_verify_flow_info ();
  /* The linear order is about to change; dominator info keyed to the old
     layout is discarded rather than updated.  */
  free_dominance_info (CDI_DOMINATORS);
  force_one_exit_fallthru ();
  /* Switch back to the normal (non-cfglayout) RTL hooks.  */
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  /* Materialize the ->aux ordering in the actual insn chain.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
4318
ba5e9aca 4319
9ee634e3 4320/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4321
4322static basic_block
d329e058 4323cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4324{
ae50c0cb 4325 rtx insn = (rtx) insnp;
f470c378 4326 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4327
d8ce2eae
DM
4328 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4329 BB_FOOTER (bb) = NULL;
9ee634e3 4330
f470c378 4331 return new_bb;
9ee634e3
JH
4332}
4333
/* Redirect Edge to DEST.  Returns the (possibly new) edge on success,
   or NULL when the redirection cannot be performed.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal call and EH edges cannot be redirected.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already going to DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* The entry block's fallthru edge can be retargeted directly.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
        fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
                 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
          && label_is_jump_target_p (BB_HEAD (e->dest),
                                     BB_END (src)))
        {
          edge redirected;

          if (dump_file)
            fprintf (dump_file, "Fallthru edge unified with branch "
                     "%i->%i redirected to %i\n",
                     e->src->index, e->dest->index, dest->index);
          e->flags &= ~EDGE_FALLTHRU;
          redirected = redirect_branch_edge (e, dest);
          gcc_assert (redirected);
          redirected->flags |= EDGE_FALLTHRU;
          df_set_bb_dirty (redirected->src);
          return redirected;
        }
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
        {
          /* Find the edge that is different from E.  */
          edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

          if (s->dest == dest
              && any_condjump_p (BB_END (src))
              && onlyjump_p (BB_END (src)))
            delete_insn (BB_END (src));
        }
      if (dump_file)
        fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
                 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
4416
4417/* Simple wrapper as we always can redirect fallthru edges. */
4418static basic_block
d329e058 4419cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4420{
341c100f
NS
4421 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4422
4423 gcc_assert (redirected);
9ee634e3
JH
4424 return NULL;
4425}
4426
/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  /* Splice BB's header insns into the real insn stream just before
     BB_HEAD so rtl_delete_block sees (and deletes) them.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
        SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
        set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      /* Walk to the last header insn and join it back to BB_HEAD.  */
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (insn)
        {
          /* Unlink barriers from the footer; they will be dropped.  Stop
             at the first label, which must stay put.  */
          if (BARRIER_P (insn))
            {
              if (PREV_INSN (insn))
                SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
              else
                BB_FOOTER (bb) = NEXT_INSN (insn);
              if (NEXT_INSN (insn))
                SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
            }
          if (LABEL_P (insn))
            break;
          insn = NEXT_INSN (insn);
        }
      /* Splice the remaining footer insns after BB_END.  */
      if (BB_FOOTER (bb))
        {
          insn = BB_END (bb);
          SET_NEXT_INSN (insn) = BB_FOOTER (bb);
          SET_PREV_INSN (BB_FOOTER (bb)) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
          SET_NEXT_INSN (insn) = next;
          if (next)
            SET_PREV_INSN (next) = insn;
          else
            set_last_insn (insn);
        }
    }
  /* Any insns that survive deletion (e.g. labels) are attached to the
     next block's header, or to the function footer for the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* PREV/NEXT now bracket whatever insns were left behind in the
     stream where BB used to be.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      /* Move the leftover insns onto the *TO chain chosen above.  */
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
        SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4510
beb235f8 4511/* Return true when blocks A and B can be safely merged. */
b48d0358 4512
bc35512f 4513static bool
b48d0358 4514cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4515{
750054a2
CT
4516 /* If we are partitioning hot/cold basic blocks, we don't want to
4517 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4518 and cold sections.
4519
8e8d5162 4520 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4521 be optimizable (or blocks that appear to be mergeable), but which really
4522 must be left untouched (they are required to make it safely across
4523 partition boundaries). See the comments at the top of
8e8d5162
CT
4524 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4525
87c8b4be 4526 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4527 return false;
750054a2 4528
7d776ee2
RG
4529 /* Protect the loop latches. */
4530 if (current_loops && b->loop_father->latch == b)
4531 return false;
4532
894a84b5
BS
4533 /* If we would end up moving B's instructions, make sure it doesn't fall
4534 through into the exit block, since we cannot recover from a fallthrough
4535 edge into the exit block occurring in the middle of a function. */
4536 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4537 {
4538 edge e = find_fallthru_edge (b->succs);
fefa31b5 4539 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4540 return false;
4541 }
4542
bc35512f 4543 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4544 return (single_succ_p (a)
4545 && single_succ (a) == b
4546 && single_pred_p (b) == 1
628f6a4e 4547 && a != b
bc35512f 4548 /* Must be simple edge. */
c5cbcccf 4549 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4550 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4551 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4552 /* If the jump insn has side effects, we can't kill the edge.
4553 When not optimizing, try_redirect_by_replacing_jump will
4554 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4555 && (!JUMP_P (BB_END (a))
49ea3702 4556 || ((!optimize || reload_completed)
a813c111 4557 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4558}
4559
/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
             a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
        BB_FOOTER (a) = BB_FOOTER (b);
      else
        {
          /* Append B's footer chain after the end of A's.  */
          rtx_insn *last = BB_FOOTER (a);

          while (NEXT_INSN (last))
            last = NEXT_INSN (last);
          SET_NEXT_INSN (last) = BB_FOOTER (b);
          SET_PREV_INSN (BB_FOOTER (b)) = last;
        }
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
        BB_FOOTER (a) = BB_HEADER (b);
      else
        {
          /* Prepend B's header chain in front of A's footer.  */
          rtx_insn *last = BB_HEADER (b);

          while (NEXT_INSN (last))
            last = NEXT_INSN (last);
          SET_NEXT_INSN (last) = BB_FOOTER (a);
          SET_PREV_INSN (BB_FOOTER (a)) = last;
          BB_FOOTER (a) = BB_HEADER (b);
        }
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  /* B's insns now belong to A; delete B's basic-block note.  */
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4663
4664/* Split edge E. */
f470c378 4665
bc35512f
JH
4666static basic_block
4667cfg_layout_split_edge (edge e)
4668{
bc35512f 4669 basic_block new_bb =
fefa31b5 4670 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4671 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4672 NULL_RTX, e->src);
4673
fefa31b5 4674 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4675 BB_COPY_PARTITION (new_bb, e->src);
4676 else
4677 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4678 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4679 redirect_edge_and_branch_force (e, new_bb);
4680
4681 return new_bb;
4682}
4683
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing needs to be done at the RTL level, so this hook is a no-op.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4690
df92c640
SB
4691/* Return true if BB contains only labels or non-executable
4692 instructions. */
4693
4694static bool
4695rtl_block_empty_p (basic_block bb)
4696{
8879d71c 4697 rtx_insn *insn;
df92c640 4698
fefa31b5
DM
4699 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4700 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4701 return true;
4702
4703 FOR_BB_INSNS (bb, insn)
4704 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4705 return false;
4706
4707 return true;
4708}
4709
4710/* Split a basic block if it ends with a conditional branch and if
4711 the other part of the block is not empty. */
4712
4713static basic_block
4714rtl_split_block_before_cond_jump (basic_block bb)
4715{
8879d71c
DM
4716 rtx_insn *insn;
4717 rtx_insn *split_point = NULL;
4718 rtx_insn *last = NULL;
df92c640
SB
4719 bool found_code = false;
4720
4721 FOR_BB_INSNS (bb, insn)
4722 {
4723 if (any_condjump_p (insn))
4724 split_point = last;
4725 else if (NONDEBUG_INSN_P (insn))
4726 found_code = true;
4727 last = insn;
4728 }
4729
4730 /* Did not find everything. */
4731 if (found_code && split_point)
4732 return split_block (bb, split_point)->dest;
4733 else
4734 return NULL;
4735}
4736
6de9cd9a
DN
4737/* Return 1 if BB ends with a call, possibly followed by some
4738 instructions that must stay with the call, 0 otherwise. */
4739
4740static bool
b48d0358 4741rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a 4742{
8879d71c 4743 rtx_insn *insn = BB_END (bb);
6de9cd9a 4744
4b4bf941 4745 while (!CALL_P (insn)
6de9cd9a 4746 && insn != BB_HEAD (bb)
92ddef69 4747 && (keep_with_call_p (insn)
b5b8b0ac
AO
4748 || NOTE_P (insn)
4749 || DEBUG_INSN_P (insn)))
6de9cd9a 4750 insn = PREV_INSN (insn);
4b4bf941 4751 return (CALL_P (insn));
6de9cd9a
DN
4752}
4753
4754/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4755
4756static bool
9678086d 4757rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4758{
4759 return any_condjump_p (BB_END (bb));
4760}
4761
4762/* Return true if we need to add fake edge to exit.
4763 Helper function for rtl_flow_call_edges_add. */
4764
4765static bool
8879d71c 4766need_fake_edge_p (const rtx_insn *insn)
6de9cd9a
DN
4767{
4768 if (!INSN_P (insn))
4769 return false;
4770
4b4bf941 4771 if ((CALL_P (insn)
6de9cd9a
DN
4772 && !SIBLING_CALL_P (insn)
4773 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4774 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4775 return true;
4776
4777 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4778 && MEM_VOLATILE_P (PATTERN (insn)))
4779 || (GET_CODE (PATTERN (insn)) == PARALLEL
4780 && asm_noperands (insn) != -1
4781 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4782 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4783}
4784
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for an empty function.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The dummy (use (const_int 0)) keeps the fallthru edge
		 distinct from the fake edge added further below.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Block numbering may have holes; skip indices with no block.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Scan the block backwards, so that splitting it does not disturb
	 the insns still to be visited.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4909
1cb7dfc3 4910/* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
315682fb 4911 the conditional branch target, SECOND_HEAD should be the fall-thru
1cb7dfc3
MH
4912 there is no need to handle this here the loop versioning code handles
4913 this. the reason for SECON_HEAD is that it is needed for condition
4914 in trees, and this should be of the same type since it is a hook. */
4915static void
4916rtl_lv_add_condition_to_bb (basic_block first_head ,
c22cacf3
MS
4917 basic_block second_head ATTRIBUTE_UNUSED,
4918 basic_block cond_bb, void *comp_rtx)
1cb7dfc3 4919{
1476d1bd 4920 rtx_code_label *label;
8879d71c 4921 rtx_insn *seq, *jump;
1cb7dfc3
MH
4922 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4923 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4924 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
ef4bddc2 4925 machine_mode mode;
1cb7dfc3
MH
4926
4927
4928 label = block_label (first_head);
4929 mode = GET_MODE (op0);
4930 if (mode == VOIDmode)
4931 mode = GET_MODE (op1);
4932
4933 start_sequence ();
4934 op0 = force_operand (op0, NULL_RTX);
4935 op1 = force_operand (op1, NULL_RTX);
1476d1bd 4936 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label, -1);
1cb7dfc3
MH
4937 jump = get_last_insn ();
4938 JUMP_LABEL (jump) = label;
4939 LABEL_NUSES (label)++;
4940 seq = get_insns ();
4941 end_sequence ();
4942
2b9c63a2 4943 /* Add the new cond, in the new head. */
c3284718 4944 emit_insn_after (seq, BB_END (cond_bb));
1cb7dfc3
MH
4945}
4946
4947
4948/* Given a block B with unconditional branch at its end, get the
4949 store the return the branch edge and the fall-thru edge in
4950 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4951static void
4952rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4953 edge *fallthru_edge)
4954{
4955 edge e = EDGE_SUCC (b, 0);
4956
4957 if (e->flags & EDGE_FALLTHRU)
4958 {
4959 *fallthru_edge = e;
4960 *branch_edge = EDGE_SUCC (b, 1);
4961 }
4962 else
4963 {
4964 *branch_edge = e;
4965 *fallthru_edge = EDGE_SUCC (b, 1);
4966 }
4967}
4968
/* Allocate and attach the RTL-specific info structure for basic block
   BB.  BB must not already have one (asserted).  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  /* ggc_cleared_alloc returns zero-initialized GC-managed storage.  */
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}
4976
14fa2cc0
ZD
4977/* Returns true if it is possible to remove edge E by redirecting
4978 it to the destination of the other edge from E->src. */
4979
4980static bool
9678086d 4981rtl_can_remove_branch_p (const_edge e)
14fa2cc0 4982{
9678086d
KG
4983 const_basic_block src = e->src;
4984 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
8879d71c
DM
4985 const rtx_insn *insn = BB_END (src);
4986 rtx set;
14fa2cc0
ZD
4987
4988 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 4989 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
4990 return false;
4991
4992 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4993 return false;
4994
3371a64f 4995 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
4996 return false;
4997
4998 if (!onlyjump_p (insn)
4999 || tablejump_p (insn, NULL, NULL))
5000 return false;
5001
5002 set = single_set (insn);
5003 if (!set || side_effects_p (set))
5004 return false;
5005
5006 return true;
5007}
5008
ffe14686
AM
5009static basic_block
5010rtl_duplicate_bb (basic_block bb)
5011{
5012 bb = cfg_layout_duplicate_bb (bb);
5013 bb->aux = NULL;
5014 return bb;
5015}
5016
aa4723d7
SB
5017/* Do book-keeping of basic block BB for the profile consistency checker.
5018 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5019 then do post-pass accounting. Store the counting in RECORD. */
5020static void
5021rtl_account_profile_record (basic_block bb, int after_pass,
5022 struct profile_record *record)
5023{
8879d71c 5024 rtx_insn *insn;
aa4723d7
SB
5025 FOR_BB_INSNS (bb, insn)
5026 if (INSN_P (insn))
5027 {
5028 record->size[after_pass]
5029 += insn_rtx_cost (PATTERN (insn), false);
0a6a6ac9 5030 if (profile_status_for_fn (cfun) == PROFILE_READ)
aa4723d7
SB
5031 record->time[after_pass]
5032 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
0a6a6ac9 5033 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
aa4723d7
SB
5034 record->time[after_pass]
5035 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5036 }
5037}
5038
/* Implementation of CFG manipulation for linearized RTL.
   Entries are positional; see cfghooks.h for the role of each hook.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
5076
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   Entries are positional; see cfghooks.h for the role of each hook.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
78bde837
SB
5118
5119#include "gt-cfgrtl.h"