]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
Fix PR 71627 - unable to find a register to spill
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
ca6c03ca 1/* Control flow graph manipulation code for GNU compiler.
818ab71a 2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
ca6c03ca
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca6c03ca
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca6c03ca 19
5f0d2358
RK
20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
ca6c03ca
JH
22
23 Available functionality:
bc35512f 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5f0d2358 25 - CFG-aware instruction chain manipulation
ca6c03ca 26 delete_insn, delete_insn_chain
bc35512f
JH
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
ba5e9aca
EB
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
bc35512f
JH
33
34 Functions not supposed for generic use:
5f0d2358 35 - Infrastructure to determine quickly basic block for insn
ca6c03ca 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5f0d2358 37 - Edge redirection with updating and optimizing of insn chain
bc35512f 38 block_label, tidy_fallthru_edge, force_nonfallthru */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6 42#include "coretypes.h"
c7131fb2 43#include "backend.h"
957060b5 44#include "target.h"
c7131fb2 45#include "rtl.h"
957060b5
AM
46#include "tree.h"
47#include "cfghooks.h"
c7131fb2 48#include "df.h"
957060b5 49#include "insn-config.h"
4d0cdd0c 50#include "memmodel.h"
957060b5 51#include "emit-rtl.h"
60393bbc
AM
52#include "cfgrtl.h"
53#include "cfganal.h"
54#include "cfgbuild.h"
55#include "cfgcleanup.h"
60393bbc 56#include "bb-reorder.h"
0cbd9993 57#include "rtl-error.h"
2a34bece 58#include "insn-attr.h"
36566b39 59#include "dojump.h"
ff25ef99 60#include "expr.h"
1cb7dfc3 61#include "cfgloop.h"
ef330312 62#include "tree-pass.h"
013a8899 63#include "print-rtl.h"
ca6c03ca 64
78bde837
SB
65/* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
1130d5e3 67static GTY(()) rtx_insn *cfg_layout_function_footer;
9152e0aa 68static GTY(()) rtx_insn *cfg_layout_function_header;
78bde837 69
8879d71c 70static rtx_insn *skip_insns_after_block (basic_block);
78bde837 71static void record_effective_endpoints (void);
78bde837
SB
72static void fixup_reorder_chain (void);
73
74void verify_insn_chain (void);
75static void fixup_fallthru_exit_predecessor (void);
8879d71c
DM
76static int can_delete_note_p (const rtx_note *);
77static int can_delete_label_p (const rtx_code_label *);
d329e058 78static basic_block rtl_split_edge (edge);
f470c378 79static bool rtl_move_block_after (basic_block, basic_block);
d329e058 80static int rtl_verify_flow_info (void);
f470c378 81static basic_block cfg_layout_split_block (basic_block, void *);
6de9cd9a 82static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
d329e058
AJ
83static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84static void cfg_layout_delete_block (basic_block);
85static void rtl_delete_block (basic_block);
86static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
6de9cd9a 87static edge rtl_redirect_edge_and_branch (edge, basic_block);
f470c378 88static basic_block rtl_split_block (basic_block, void *);
a315c44c 89static void rtl_dump_bb (FILE *, basic_block, int, int);
d329e058 90static int rtl_verify_flow_info_1 (void);
f470c378 91static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
92\f
93/* Return true if NOTE is not one of the ones that must be kept paired,
5f0d2358 94 so that we may simply delete it. */
ca6c03ca
JH
95
96static int
8879d71c 97can_delete_note_p (const rtx_note *note)
ca6c03ca 98{
cd9c1ca8
RH
99 switch (NOTE_KIND (note))
100 {
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
105
106 default:
107 return false;
108 }
ca6c03ca
JH
109}
110
111/* True if a given label can be deleted. */
112
113static int
8879d71c 114can_delete_label_p (const rtx_code_label *label)
ca6c03ca 115{
5f0d2358
RK
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
6f7eba34
TS
119 && !vec_safe_contains<rtx_insn *> (forced_labels,
120 const_cast<rtx_code_label *> (label)));
ca6c03ca
JH
121}
122
03fbe718 123/* Delete INSN by patching it out. */
ca6c03ca 124
/* Delete INSN by patching it out.  Labels that cannot be removed are
   demoted to NOTE_INSN_DELETED_LABEL instead; label use counts referenced
   by the insn are decremented.  */

void
delete_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.
	     This keeps the (now deleted-label) note outside the live block
	     head.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
219
98ccd1d7
IE
220/* Like delete_insn but also purge dead edges from BB.
221 Return true if any edges are eliminated. */
9ed7b221 222
98ccd1d7 223bool
373fc786 224delete_insn_and_edges (rtx_insn *insn)
3dec4024 225{
3dec4024
JH
226 bool purge = false;
227
ba4f7968 228 if (INSN_P (insn)
3dec4024 229 && BLOCK_FOR_INSN (insn)
a813c111 230 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024 231 purge = true;
03fbe718 232 delete_insn (insn);
3dec4024 233 if (purge)
98ccd1d7
IE
234 return purge_dead_edges (BLOCK_FOR_INSN (insn));
235 return false;
3dec4024
JH
236}
237
ca6c03ca 238/* Unlink a chain of insns between START and FINISH, leaving notes
a7b87f73
ZD
239 that must be paired. If CLEAR_BB is true, we set bb field for
240 insns that cannot be removed to NULL. */
ca6c03ca
JH
241
242void
a7b87f73 243delete_insn_chain (rtx start, rtx finish, bool clear_bb)
ca6c03ca 244{
8879d71c 245 rtx_insn *prev, *current;
ca6c03ca 246
5f0d2358
RK
247 /* Unchain the insns one by one. It would be quicker to delete all of these
248 with a single unchaining, rather than one at a time, but we need to keep
249 the NOTE's. */
8879d71c 250 current = safe_as_a <rtx_insn *> (finish);
ca6c03ca
JH
251 while (1)
252 {
03fbe718 253 prev = PREV_INSN (current);
8879d71c 254 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
ca6c03ca
JH
255 ;
256 else
03fbe718 257 delete_insn (current);
ca6c03ca 258
4654c0cf 259 if (clear_bb && !current->deleted ())
03fbe718 260 set_block_for_insn (current, NULL);
a7b87f73 261
03fbe718 262 if (current == start)
ca6c03ca 263 break;
03fbe718 264 current = prev;
ca6c03ca
JH
265 }
266}
267\f
5f0d2358
RK
268/* Create a new basic block consisting of the instructions between HEAD and END
269 inclusive. This function is designed to allow fast BB construction - reuses
270 the note and basic block struct in BB_NOTE, if any and do not grow
271 BASIC_BLOCK chain and should be used directly only by CFG construction code.
272 END can be NULL in to create new empty basic block before HEAD. Both END
918ed612
ZD
273 and HEAD can be NULL to create basic block at the end of INSN chain.
274 AFTER is the basic block we should be put after. */
ca6c03ca
JH
275
/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction -
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block we should be put
   after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes right after the label.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No leading label: the bb note becomes the block head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
348
5f0d2358 349/* Create new basic block consisting of instructions in between HEAD and END
ffe14686
AM
350 and place it to the BB chain after block AFTER. END can be NULL to
351 create a new empty basic block before HEAD. Both END and HEAD can be
352 NULL to create basic block at the end of INSN chain. */
ca6c03ca 353
bc35512f
JH
354static basic_block
355rtl_create_basic_block (void *headp, void *endp, basic_block after)
ca6c03ca 356{
e00022e9
DM
357 rtx_insn *head = (rtx_insn *) headp;
358 rtx_insn *end = (rtx_insn *) endp;
ca6c03ca 359 basic_block bb;
0b17ab2f 360
7eca0767 361 /* Grow the basic block array if needed. */
8b1c6fd7
DM
362 if ((size_t) last_basic_block_for_fn (cfun)
363 >= basic_block_info_for_fn (cfun)->length ())
7eca0767 364 {
8b1c6fd7
DM
365 size_t new_size =
366 (last_basic_block_for_fn (cfun)
367 + (last_basic_block_for_fn (cfun) + 3) / 4);
c5f05f68 368 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
7eca0767 369 }
0b17ab2f 370
0cae8d31 371 n_basic_blocks_for_fn (cfun)++;
ca6c03ca 372
852c6ec7 373 bb = create_basic_block_structure (head, end, NULL, after);
ca6c03ca
JH
374 bb->aux = NULL;
375 return bb;
376}
bc35512f
JH
377
378static basic_block
379cfg_layout_create_basic_block (void *head, void *end, basic_block after)
380{
381 basic_block newbb = rtl_create_basic_block (head, end, after);
382
bc35512f
JH
383 return newbb;
384}
ca6c03ca
JH
385\f
386/* Delete the insns in a (non-live) block. We physically delete every
387 non-deleted-note insn, and update the flow graph appropriately.
388
389 Return nonzero if we deleted an exception handler. */
390
391/* ??? Preserving all such notes strikes me as wrong. It would be nice
392 to post-process the stream to remove empty blocks, loops, ranges, etc. */
393
f0fda11c 394static void
d329e058 395rtl_delete_block (basic_block b)
ca6c03ca 396{
8879d71c 397 rtx_insn *insn, *end;
ca6c03ca
JH
398
399 /* If the head of this block is a CODE_LABEL, then it might be the
f39e46ba
SB
400 label for an exception handler which can't be reached. We need
401 to remove the label from the exception_handler_label list. */
a813c111 402 insn = BB_HEAD (b);
ca6c03ca 403
96370780 404 end = get_last_bb_insn (b);
ca6c03ca
JH
405
406 /* Selectively delete the entire chain. */
1130d5e3 407 BB_HEAD (b) = NULL;
a7b87f73
ZD
408 delete_insn_chain (insn, end, true);
409
6fb5fa3c
DB
410
411 if (dump_file)
412 fprintf (dump_file, "deleting block %d\n", b->index);
413 df_bb_delete (b->index);
ca6c03ca
JH
414}
415\f
852c6ec7 416/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
417
418void
d329e058 419compute_bb_for_insn (void)
ca6c03ca 420{
e0082a72 421 basic_block bb;
ca6c03ca 422
11cd3bed 423 FOR_EACH_BB_FN (bb, cfun)
ca6c03ca 424 {
8879d71c
DM
425 rtx_insn *end = BB_END (bb);
426 rtx_insn *insn;
ca6c03ca 427
a813c111 428 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 429 {
ba4f7968 430 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
431 if (insn == end)
432 break;
ca6c03ca
JH
433 }
434 }
435}
436
437/* Release the basic_block_for_insn array. */
438
c2924966 439unsigned int
d329e058 440free_bb_for_insn (void)
ca6c03ca 441{
8879d71c 442 rtx_insn *insn;
ba4f7968 443 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 444 if (!BARRIER_P (insn))
ba4f7968 445 BLOCK_FOR_INSN (insn) = NULL;
c2924966 446 return 0;
ca6c03ca
JH
447}
448
27a4cd48
DM
namespace {

/* Pass descriptor for the *free_cfg pass, which tears down the
   per-insn block mapping at the end of RTL compilation.  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace
495
/* Factory for the *free_cfg pass; the pass manager takes ownership of
   the returned object.  */

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
501
91278841 502/* Return RTX to emit after when we want to emit code on the entry of function. */
8a2e8325 503rtx_insn *
91278841
AP
504entry_of_function (void)
505{
0cae8d31 506 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
fefa31b5 507 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
91278841
AP
508}
509
11b904a1
BS
510/* Emit INSN at the entry point of the function, ensuring that it is only
511 executed once per function. */
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  The entry block is asserted to have a
   single fallthru successor; INSN is queued on that edge and committed
   immediately.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
522
6c3d0e31 523/* Update BLOCK_FOR_INSN of insns between BEGIN and END
b8698a0f 524 (or BARRIER if found) and notify df of the bb change.
6c3d0e31
SP
525 The insn chain range is inclusive
526 (i.e. both BEGIN and END will be updated. */
ca6c03ca 527
6c3d0e31 528static void
8879d71c 529update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
ca6c03ca 530{
8879d71c 531 rtx_insn *insn;
ca6c03ca 532
6c3d0e31
SP
533 end = NEXT_INSN (end);
534 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
63642d5a
AO
535 if (!BARRIER_P (insn))
536 df_insn_change_bb (insn, bb);
ca6c03ca 537}
6c3d0e31
SP
538
539/* Update BLOCK_FOR_INSN of insns in BB to BB,
540 and notify df of the change. */
541
/* Update BLOCK_FOR_INSN of every insn in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
547
532aafad
SB
548\f
549/* Like active_insn_p, except keep the return value clobber around
550 even after reload. */
551
552static bool
8879d71c 553flow_active_insn_p (const rtx_insn *insn)
532aafad
SB
554{
555 if (active_insn_p (insn))
556 return true;
557
558 /* A clobber of the function return value exists for buggy
559 programs that fail to return a value. Its effect is to
560 keep the return value from being live across the entire
561 function. If we allow it to be skipped, we introduce the
562 possibility for register lifetime confusion. */
563 if (GET_CODE (PATTERN (insn)) == CLOBBER
564 && REG_P (XEXP (PATTERN (insn), 0))
565 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
566 return true;
567
568 return false;
569}
570
571/* Return true if the block has no effect and only forwards control flow to
572 its single destination. */
532aafad
SB
573
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  /* The fake entry/exit blocks, blocks without exactly one successor,
     and blocks whose successor edge is fake never qualify.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  /* Every insn before the block's end must be inactive.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* The final insn may be a simple jump or must itself be inactive.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
593
594/* Likewise, but protect loop latches, headers and preheaders. */
595/* FIXME: Make this a cfg hook. */
596
597bool
598forwarder_block_p (const_basic_block bb)
599{
600 if (!contains_no_active_insn_p (bb))
601 return false;
602
532aafad
SB
603 /* Protect loop latches, headers and preheaders. */
604 if (current_loops)
605 {
606 basic_block dest;
607 if (bb->loop_father->header == bb)
608 return false;
609 dest = EDGE_SUCC (bb, 0)->dest;
610 if (dest->loop_father->header == dest)
611 return false;
612 }
613
2407343c 614 return true;
532aafad
SB
615}
616
617/* Return nonzero if we can reach target from src by falling through. */
d7b6661b 618/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
532aafad
SB
619
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  /* Anything can "fall through" into the exit block.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fallthru is only possible into the immediately following block.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fallthru edge to the exit block precludes another
     fallthru out of SRC.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* The next active insn after SRC's end must be TARGET's first active
     insn for execution to fall straight through.  */
  return next_active_insn (insn) == insn2;
}
648
649/* Return nonzero if we could reach target from src by falling through,
650 if the target was made adjacent. If we already have a fall-through
651 edge to the exit block, we can't do that. */
652static bool
653could_fall_through (basic_block src, basic_block target)
654{
655 edge e;
656 edge_iterator ei;
657
fefa31b5 658 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
659 return true;
660 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 661 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad
SB
662 && e->flags & EDGE_FALLTHRU)
663 return 0;
664 return true;
665}
ca6c03ca 666\f
ef2be249 667/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
ed270997 668rtx_note *
ef2be249
RS
669bb_note (basic_block bb)
670{
8879d71c 671 rtx_insn *note;
ef2be249
RS
672
673 note = BB_HEAD (bb);
674 if (LABEL_P (note))
675 note = NEXT_INSN (note);
676
677 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
ed270997 678 return as_a <rtx_note *> (note);
ef2be249
RS
679}
680
6fb5fa3c
DB
681/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
682 note associated with the BLOCK. */
683
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  /* Skip an optional leading label; the bb note must come next.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
700
2b9c63a2
FY
701/* Creates a new basic block just after basic block BB by splitting
702 everything after specified instruction INSNP. */
ca6c03ca 703
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      /* No split point given: split right after the bb note.  */
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
762
9be94227
EB
763/* Return true if the single edge between blocks A and B is the only place
764 in RTL which holds some unique locus. */
765
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward: stop at the first located non-debug
     insn.  If it already carries the edge's locus, the locus is not
     unique to the edge.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward for the same condition.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  /* Neither block records the locus; only the edge holds it.  */
  return true;
}
799
800/* If the single edge between blocks A and B is the only place in RTL which
801 holds some unique locus, emit a nop with that locus between the blocks. */
802
/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.
   The nop is appended at the end of A so the location survives in the
   insn stream.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}
812
ca6c03ca 813/* Blocks A and B are to be merged into a single block A. The insns
bc35512f 814 are already contiguous. */
ca6c03ca 815
bc35512f
JH
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Peel trailing debug insns off B; they are re-attached after the
     merge so notes between the blocks stay in place.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      /* Find the insn before the jump, skipping interior notes.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
bc35512f 932
6fb5fa3c 933
bc35512f 934/* Return true when block A and B can be merged. */
9678086d 935
b48d0358
DN
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
ca6c03ca 972\f
5f0d2358
RK
973/* Return the label in the head of basic block BLOCK. Create one if it doesn't
974 exist. */
ca6c03ca 975
1476d1bd 976rtx_code_label *
d329e058 977block_label (basic_block block)
ca6c03ca 978{
fefa31b5 979 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1476d1bd 980 return NULL;
5f0d2358 981
4b4bf941 982 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 983 {
1130d5e3 984 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 985 }
5f0d2358 986
1476d1bd 987 return as_a <rtx_code_label *> (BB_HEAD (block));
ca6c03ca
JH
988}
989
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.

   E is the edge to redirect, TARGET its new destination, and IN_CFGLAYOUT
   says whether we are in cfglayout mode (which affects how the dead jump
   and any barriers are removed).  Returns the surviving edge on success,
   NULL on failure.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* NOTE: this INSN intentionally shadows the outer one; it walks
	     the footer chain kept separately in cfglayout mode.  */
	  rtx_insn *insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  /* Redirection can only fail when targeting the exit block.  */
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* The new jump must be followed by a barrier; emit one if the
	 next non-note insn isn't already a barrier.  */
      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1168
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.

   Handles three shapes of jump: tablejumps (rewrite each matching vector
   entry and any casesi dispatch), asm gotos (rewrite matching operand
   labels and fix up REG_LABEL_TARGET/REG_LABEL_OPERAND notes), and plain
   jumps (redirect_jump).  Label use counts are kept consistent
   throughout.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      /* Rewrite every asm-goto operand label that pointed at OLD_LABEL.  */
      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* JUMP_LABEL itself records the target, so a duplicate
	     REG_LABEL_TARGET note for NEW_LABEL would be redundant.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Retarget any remaining REG_LABEL_OPERAND notes as well.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1282
1283
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure.

   During RTL expansion a block may still contain multiple jumps, so in
   that mode every jump matching the old destination label is patched;
   otherwise only the block-ending jump is.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1322
3371a64f
TJ
1323/* Called when edge E has been redirected to a new destination,
1324 in order to update the region crossing flag on the edge and
1325 jump. */
1326
1327static void
1328fixup_partition_crossing (edge e)
1329{
fefa31b5
DM
1330 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1331 == EXIT_BLOCK_PTR_FOR_FN (cfun))
3371a64f
TJ
1332 return;
1333 /* If we redirected an existing edge, it may already be marked
1334 crossing, even though the new src is missing a reg crossing note.
1335 But make sure reg crossing note doesn't already exist before
1336 inserting. */
1337 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1338 {
1339 e->flags |= EDGE_CROSSING;
3371a64f 1340 if (JUMP_P (BB_END (e->src))
339ba33b
RS
1341 && !CROSSING_JUMP_P (BB_END (e->src)))
1342 CROSSING_JUMP_P (BB_END (e->src)) = 1;
3371a64f
TJ
1343 }
1344 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1345 {
1346 e->flags &= ~EDGE_CROSSING;
1347 /* Remove the section crossing note from jump at end of
1348 src if it exists, and if no other successors are
1349 still crossing. */
339ba33b 1350 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
3371a64f
TJ
1351 {
1352 bool has_crossing_succ = false;
1353 edge e2;
1354 edge_iterator ei;
1355 FOR_EACH_EDGE (e2, ei, e->src->succs)
1356 {
1357 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1358 if (has_crossing_succ)
1359 break;
1360 }
1361 if (!has_crossing_succ)
339ba33b 1362 CROSSING_JUMP_P (BB_END (e->src)) = 0;
3371a64f
TJ
1363 }
1364 }
1365}
1366
600b5b1d
TJ
1367/* Called when block BB has been reassigned to the cold partition,
1368 because it is now dominated by another cold block,
1369 to ensure that the region crossing attributes are updated. */
1370
1371static void
1372fixup_new_cold_bb (basic_block bb)
1373{
1374 edge e;
1375 edge_iterator ei;
1376
1377 /* This is called when a hot bb is found to now be dominated
1378 by a cold bb and therefore needs to become cold. Therefore,
1379 its preds will no longer be region crossing. Any non-dominating
1380 preds that were previously hot would also have become cold
1381 in the caller for the same region. Any preds that were previously
1382 region-crossing will be adjusted in fixup_partition_crossing. */
1383 FOR_EACH_EDGE (e, ei, bb->preds)
1384 {
1385 fixup_partition_crossing (e);
1386 }
1387
1388 /* Possibly need to make bb's successor edges region crossing,
1389 or remove stale region crossing. */
1390 FOR_EACH_EDGE (e, ei, bb->succs)
1391 {
1392 /* We can't have fall-through edges across partition boundaries.
1393 Note that force_nonfallthru will do any necessary partition
1394 boundary fixup by calling fixup_partition_crossing itself. */
1395 if ((e->flags & EDGE_FALLTHRU)
1396 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
fefa31b5 1397 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
600b5b1d
TJ
1398 force_nonfallthru (e);
1399 else
1400 fixup_partition_crossing (e);
1401 }
1402}
1403
bc35512f
JH
1404/* Attempt to change code to redirect edge E to TARGET. Don't do that on
1405 expense of adding new instructions or reordering basic blocks.
1406
1407 Function can be also called with edge destination equivalent to the TARGET.
1408 Then it should try the simplifications and do nothing if none is possible.
1409
6de9cd9a
DN
1410 Return edge representing the branch if transformation succeeded. Return NULL
1411 on failure.
1412 We still return NULL in case E already destinated TARGET and we didn't
1413 managed to simplify instruction stream. */
bc35512f 1414
6de9cd9a 1415static edge
5671bf27 1416rtl_redirect_edge_and_branch (edge e, basic_block target)
bc35512f 1417{
6de9cd9a 1418 edge ret;
f345f21a 1419 basic_block src = e->src;
3371a64f 1420 basic_block dest = e->dest;
f345f21a 1421
bc35512f 1422 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 1423 return NULL;
bc35512f 1424
3371a64f 1425 if (dest == target)
6de9cd9a 1426 return e;
3348b696 1427
6de9cd9a 1428 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
f345f21a 1429 {
6fb5fa3c 1430 df_set_bb_dirty (src);
3371a64f 1431 fixup_partition_crossing (ret);
6de9cd9a 1432 return ret;
f345f21a 1433 }
bc35512f 1434
6de9cd9a
DN
1435 ret = redirect_branch_edge (e, target);
1436 if (!ret)
1437 return NULL;
5f0d2358 1438
6fb5fa3c 1439 df_set_bb_dirty (src);
3371a64f 1440 fixup_partition_crossing (ret);
6de9cd9a 1441 return ret;
ca6c03ca
JH
1442}
1443
3371a64f
TJ
1444/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1445
1446void
1447emit_barrier_after_bb (basic_block bb)
1448{
cd459bf8 1449 rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
c3284718 1450 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
3371a64f
TJ
1451 || current_ir_type () == IR_RTL_CFGLAYOUT);
1452 if (current_ir_type () == IR_RTL_CFGLAYOUT)
0fb3402f
RX
1453 {
1454 rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1455
1456 if (BB_FOOTER (bb))
1457 {
1458 rtx_insn *footer_tail = BB_FOOTER (bb);
1459
1460 while (NEXT_INSN (footer_tail))
1461 footer_tail = NEXT_INSN (footer_tail);
1462 if (!BARRIER_P (footer_tail))
1463 {
1464 SET_NEXT_INSN (footer_tail) = insn;
1465 SET_PREV_INSN (insn) = footer_tail;
1466 }
1467 }
1468 else
1469 BB_FOOTER (bb) = insn;
1470 }
3371a64f
TJ
1471}
1472
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.

   Returns the newly created basic block holding the jump, or NULL when
   the jump could be emitted into E's source block directly.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
      gcc_assert (redirected);

      /* Split the branch probability/count between the redirected branch
	 edge B and the remaining fallthru edge E.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
	  /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  /* Keep JUMP_LABEL and the REG_LABEL_TARGET/OPERAND notes
	     consistent with the retargeted asm-goto labels.  */
	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1716
1717/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1718 (and possibly create new basic block) to make edge non-fallthru.
1719 Return newly created BB or NULL if none. */
5f0d2358 1720
cf103ca4
EB
1721static basic_block
1722rtl_force_nonfallthru (edge e)
ca6c03ca 1723{
26898771 1724 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
ca6c03ca
JH
1725}
1726
1727/* Redirect edge even at the expense of creating new jump insn or
1728 basic block. Return new basic block if created, NULL otherwise.
41806d92 1729 Conversion must be possible. */
ca6c03ca 1730
9ee634e3 1731static basic_block
d329e058 1732rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1733{
5f0d2358
RK
1734 if (redirect_edge_and_branch (e, target)
1735 || e->dest == target)
ca6c03ca
JH
1736 return NULL;
1737
1738 /* In case the edge redirection failed, try to force it to be non-fallthru
1739 and redirect newly created simplejump. */
6fb5fa3c 1740 df_set_bb_dirty (e->src);
26898771 1741 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1742}
1743
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  rtx_insn *lab = safe_as_a <rtx_insn *> (label);
	  reorder_insns (lab, lab, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

      /* Step back past the jump so the deletion below removes it too.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
ca6c03ca 1808\f
f470c378
ZD
/* Should move basic block BB after basic block AFTER.  NIY.
   This is the plain-RTL implementation of the move_block_after cfg hook;
   reordering the insn chain is not supported in this mode, so it always
   reports failure and callers must cope.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  /* Not implemented: signal to the caller that no move happened.  */
  return false;
}
1817
3371a64f
TJ
/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  /* Walk the bb chain forward; the first block whose successor in the
     chain lies in a different partition is the last block of
     START_BB's partition.  */
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* Return bb before the exit block.  The walk fell off the end of the
     chain, so the partition extends to the last real basic block.  */
  return bb->prev_bb;
}
1832
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      /* Force any fallthru predecessor of the destination to use an
	 explicit jump, so inserting a block before the destination
	 cannot break that fallthru.  */
      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  BEFORE is the insn in front of which
     the new block's insns will be created.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* Splitting the edge out of the entry block: place the new
	     block in front of the destination and give it the
	     destination's partition.  */
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = get_last_bb_insn (after);
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru must not have had to create another block
	 here, hence the assert.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      /* Fallthru edges need only be retargeted in the CFG.  */
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1947
1948/* Queue instructions for insertion on an edge between two basic blocks.
1949 The new instructions and basic blocks (if any) will not appear in the
1950 CFG until commit_edge_insertions is called. */
1951
1952void
d329e058 1953insert_insn_on_edge (rtx pattern, edge e)
ca6c03ca
JH
1954{
1955 /* We cannot insert instructions on an abnormal critical edge.
1956 It will be easier to find the culprit if we die now. */
341c100f 1957 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
ca6c03ca 1958
6de9cd9a 1959 if (e->insns.r == NULL_RTX)
ca6c03ca
JH
1960 start_sequence ();
1961 else
6de9cd9a 1962 push_to_sequence (e->insns.r);
ca6c03ca
JH
1963
1964 emit_insn (pattern);
1965
6de9cd9a 1966 e->insns.r = get_insns ();
ca6c03ca
JH
1967 end_sequence ();
1968}
1969
/* Update the CFG for the instructions queued on edge E.
   Detaches the queued sequence from E, chooses an insertion point
   (destination head, source tail, or a newly split block), and emits
   the insns there.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      /* A return jump ends the block: the edge to EXIT is no longer a
	 fallthru, and a barrier must follow the return.  */
      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
2078
/* Update the CFG for all queued instructions.
   Walks every block (including the entry block) and commits the
   pending insn sequence on each successor edge that has one.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks.  This will cause the verification below to fail,
     and lead to now cold code in the hot section.  In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  /* Verify the CFG before mutating it (only in checking builds).  */
  checking_verify_flow_info ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}
2107\f
6fb5fa3c 2108
f470c378 2109/* Print out RTL-specific basic block information (live information
a315c44c
SB
2110 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2111 documented in dumpfile.h. */
ca6c03ca 2112
10e9fecc 2113static void
a315c44c 2114rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
ca6c03ca 2115{
8879d71c
DM
2116 rtx_insn *insn;
2117 rtx_insn *last;
f470c378 2118 char *s_indent;
ca6c03ca 2119
ae50c0cb 2120 s_indent = (char *) alloca ((size_t) indent + 1);
400e39e3 2121 memset (s_indent, ' ', (size_t) indent);
f470c378 2122 s_indent[indent] = '\0';
b8698a0f 2123
a315c44c 2124 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2125 {
2126 df_dump_top (bb, outf);
2127 putc ('\n', outf);
2128 }
ca6c03ca 2129
68cc3174
EB
2130 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2131 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2132 insn = NEXT_INSN (insn))
a315c44c 2133 {
7b19209f
SB
2134 if (flags & TDF_DETAILS)
2135 df_dump_insn_top (insn, outf);
a315c44c
SB
2136 if (! (flags & TDF_SLIM))
2137 print_rtl_single (outf, insn);
2138 else
2139 dump_insn_slim (outf, insn);
7b19209f
SB
2140 if (flags & TDF_DETAILS)
2141 df_dump_insn_bottom (insn, outf);
a315c44c
SB
2142 }
2143
2144 if (df && (flags & TDF_DETAILS))
6fb5fa3c
DB
2145 {
2146 df_dump_bottom (bb, outf);
2147 putc ('\n', outf);
2148 }
2149
ca6c03ca
JH
2150}
2151\f
c4669594
SB
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: the block starting / ending at each insn,
	 and how many blocks each insn was seen in.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      /* First pass: record block boundaries and membership per insn.  */
      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: walk the whole insn chain in order, emitting block
	 headers/footers around the insns that delimit each block.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      /* Notes and barriers legitimately live between blocks;
		 flag anything else that is outside every block, or
		 claimed by more than one.  */
	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2249\f
532aafad
SB
2250/* Update the branch probability of BB if a REG_BR_PROB is present. */
2251
b446e5a2 2252void
d329e058 2253update_br_prob_note (basic_block bb)
b446e5a2
JH
2254{
2255 rtx note;
4b4bf941 2256 if (!JUMP_P (BB_END (bb)))
b446e5a2 2257 return;
a813c111 2258 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
e5af9ddd 2259 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
b446e5a2 2260 return;
e5af9ddd 2261 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
b446e5a2 2262}
96370780
MK
2263
2264/* Get the last insn associated with block BB (that includes barriers and
2265 tablejumps after BB). */
636eb204 2266rtx_insn *
96370780
MK
2267get_last_bb_insn (basic_block bb)
2268{
8942ee0f 2269 rtx_jump_table_data *table;
636eb204
DM
2270 rtx_insn *tmp;
2271 rtx_insn *end = BB_END (bb);
96370780
MK
2272
2273 /* Include any jump table following the basic block. */
8942ee0f
DM
2274 if (tablejump_p (end, NULL, &table))
2275 end = table;
96370780
MK
2276
2277 /* Include any barriers that may follow the basic block. */
1e211590 2278 tmp = next_nonnote_insn_bb (end);
96370780
MK
2279 while (tmp && BARRIER_P (tmp))
2280 {
2281 end = tmp;
1e211590 2282 tmp = next_nonnote_insn_bb (end);
96370780
MK
2283 }
2284
2285 return end;
2286}
af205f67 2287
600b5b1d
TJ
/* Sanity check partition hotness to ensure that basic blocks in
   the cold partition don't dominate basic blocks in the hot partition.
   If FLAG_ONLY is true, report violations as errors.  Otherwise
   re-mark the dominated blocks as cold, since this is run after
   cfg optimizations that may make hot blocks previously reached
   by both hot and cold blocks now only reachable along cold paths.
   Returns the vector of blocks found (or marked) cold; caller owns it.  */

static vec<basic_block>
find_partition_fixes (bool flag_only)
{
  basic_block bb;
  vec<basic_block> bbs_in_cold_partition = vNULL;
  vec<basic_block> bbs_to_fix = vNULL;

  /* Callers check this.  */
  gcc_checking_assert (crtl->has_bb_partition);

  /* Seed the worklist with every block already in the cold partition.  */
  FOR_EACH_BB_FN (bb, cfun)
    if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
      bbs_in_cold_partition.safe_push (bb);

  if (bbs_in_cold_partition.is_empty ())
    return vNULL;

  /* Compute dominators only if nobody else already has; free them
     again below in that case.  */
  bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);

  if (dom_calculated_here)
    calculate_dominance_info (CDI_DOMINATORS);

  /* Worklist: propagate coldness down the dominator tree.  */
  while (! bbs_in_cold_partition.is_empty ())
    {
      bb = bbs_in_cold_partition.pop ();
      /* Any blocks dominated by a block in the cold section
	 must also be cold.  */
      basic_block son;
      for (son = first_dom_son (CDI_DOMINATORS, bb);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	{
	  /* If son is not yet cold, then mark it cold here and
	     enqueue it for further processing.  */
	  if ((BB_PARTITION (son) != BB_COLD_PARTITION))
	    {
	      if (flag_only)
		error ("non-cold basic block %d dominated "
		       "by a block in the cold partition (%d)", son->index, bb->index);
	      else
		BB_SET_PARTITION (son, BB_COLD_PARTITION);
	      bbs_to_fix.safe_push (son);
	      bbs_in_cold_partition.safe_push (son);
	    }
	}
    }

  if (dom_calculated_here)
    free_dominance_info (CDI_DOMINATORS);

  return bbs_to_fix;
}
2347
2348/* Perform cleanup on the hot/cold bb partitioning after optimization
2349 passes that modify the cfg. */
2350
2351void
2352fixup_partitions (void)
2353{
2354 basic_block bb;
2355
2356 if (!crtl->has_bb_partition)
2357 return;
2358
2359 /* Delete any blocks that became unreachable and weren't
2360 already cleaned up, for example during edge forwarding
2361 and convert_jumps_to_returns. This will expose more
2362 opportunities for fixing the partition boundaries here.
2363 Also, the calculation of the dominance graph during verification
2364 will assert if there are unreachable nodes. */
2365 delete_unreachable_blocks ();
2366
2367 /* If there are partitions, do a sanity check on them: A basic block in
2368   a cold partition cannot dominate a basic block in a hot partition.
2369 Fixup any that now violate this requirement, as a result of edge
2370 forwarding and unreachable block deletion.  */
2371 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2372
2373 /* Do the partition fixup after all necessary blocks have been converted to
2374 cold, so that we only update the region crossings the minimum number of
2375 places, which can require forcing edges to be non fallthru. */
2376 while (! bbs_to_fix.is_empty ())
2377 {
2378 bb = bbs_to_fix.pop ();
2379 fixup_new_cold_bb (bb);
2380 }
2381}
2382
af205f67
TJ
/* Verify, in the basic block chain, that there is at most one switch
   between hot/cold partitions.  This condition will not be true until
   after reorder_basic_blocks is called.  Returns nonzero on error.  */

static int
verify_hot_cold_block_grouping (void)
{
  basic_block bb;
  int err = 0;
  bool switched_sections = false;
  int current_partition = BB_UNPARTITIONED;

  /* Even after bb reordering is complete, we go into cfglayout mode
     again (in compgoto).  Ensure we don't call this before going back
     into linearized RTL when any layout fixes would have been committed.  */
  if (!crtl->bb_reorder_complete
      || current_ir_type () != IR_RTL_CFGRTL)
    return err;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A partition change in the chain is a section switch; more than
	 one switch over the whole chain is an error.  */
      if (current_partition != BB_UNPARTITIONED
	  && BB_PARTITION (bb) != current_partition)
	{
	  if (switched_sections)
	    {
	      error ("multiple hot/cold transitions found (bb %i)",
		     bb->index);
	      err = 1;
	    }
	  else
	    switched_sections = true;

	  /* Any partitioned block implies the function-level flag.  */
	  if (!crtl->has_bb_partition)
	    error ("partition found but function partition flag not set");
	}
      current_partition = BB_PARTITION (bb);
    }

  return err;
}
b446e5a2 2424\f
ca6c03ca 2425
251a41b9
TJ
/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.  Returns nonzero on error.  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Counters for the categories of outgoing edges of BB.  */
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      /* A conditional jump's REG_BR_PROB note must agree with the
	 probability recorded on the branch edge (unless no profile).  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
	      && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* An edge truly crosses sections when its endpoints are real
	     blocks in different partitions.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
	  has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      /* Fallthru and EH edges can never cross a section
		 boundary; a crossing must be an explicit jump.  */
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* An edge with none of the "annotation" flags set is an
	     ordinary branch edge.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* Conversely, a crossing jump insn requires a crossing edge.  */
      if (!has_crossing_edge
	  && JUMP_P (BB_END (bb))
	  && CROSSING_JUMP_P (BB_END (bb)))
	{
	  print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
	  error ("Region crossing jump across same section in bb %i",
		 bb->index);
	  err = 1;
	}

      /* Now cross-check the edge counts against the block's last insn.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      /* NOTE(review): FALLTHRU may still be NULL here if no fallthru edge
	 was seen; this dereference relies on every conditional jump block
	 having a fallthru successor — confirm that invariant holds.  */
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2604
251a41b9
TJ
/* Checks on the instructions within blocks.  Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.
   Returns nonzero on (non-fatal) error; aborts via fatal_insn if a
   control flow insn is found inside a block.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A label alone cannot end the block: the block note must
	     still follow.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the block body: no stray block notes, and no control
	   flow insns anywhere but at the very end.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
2668
/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately.  Returns nonzero on error.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      /* Every insn in the block body must point back to the block.  */
      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header and footer insns sit outside the block proper; except
	 for barriers they must have a NULL block pointer.  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2720
10e9fecc
JH
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  /* Run each sub-verifier and OR their results together; any nonzero
     bit means the CFG/RTL is inconsistent.  */
  int failed = rtl_verify_bb_pointers ();
  failed |= rtl_verify_bb_insns ();
  failed |= rtl_verify_edges ();
  return failed;
}
2752
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.  Returns nonzero if any inconsistency was
   found and reported via error ().  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  /* Map INSN_UID -> owning block, to detect insns claimed by two blocks.  */
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  /* Blocks are visited from last to first while LAST_HEAD walks the insn
     chain backwards, so both scans stay in sync.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Continue the next (earlier) block's scan just before this head.  */
      last_head = PREV_INSN (x);
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2841
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  Returns nonzero on
   verification failure (some failures abort via fatal_insn instead).  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Hitting the end of the stream or the next block's note
		 before any barrier means the barrier is missing.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  /* A fallthru edge between two ordinary blocks: they must be
	     adjacent, and only non-insns may sit between them.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
2900
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  Hard failures
   abort via internal_error/fatal_insn; the return value is always 0 here
   but kept for symmetry with the other verifiers.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  /* CURR_BB is the block we are currently inside, or NULL between blocks.  */
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  /* Outside of any block only barriers, notes, and labels
	     (plus their jump tables) are allowed.  */
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      /* Leaving the current block when we pass its last insn.  */
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

  return err;
}
251a41b9
TJ
2969
2970/* Verify the CFG and RTL consistency common for both underlying RTL and
2971 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2972
2973 Currently it does following checks:
2974 - all checks of rtl_verify_flow_info_1
2975 - test head/end pointers
2976 - check that blocks are laid out in consecutive order
2977 - check that all insns are in the basic blocks
2978 (except the switch handling code, barriers and notes)
2979 - check that all returns are followed by barriers
600b5b1d
TJ
2980 - check that all fallthru edge points to the adjacent blocks
2981 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
2982
2983static int
2984rtl_verify_flow_info (void)
2985{
2986 int err = 0;
2987
2988 err |= rtl_verify_flow_info_1 ();
2989
2990 err |= rtl_verify_bb_insn_chain ();
2991
2992 err |= rtl_verify_fallthru ();
2993
2994 err |= rtl_verify_bb_layout ();
2995
3371a64f
TJ
2996 err |= verify_hot_cold_block_grouping ();
2997
251a41b9
TJ
2998 return err;
2999}
ca6c03ca 3000\f
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns and notes so INSN is the last insn that
     actually determines the block's outgoing edges.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = XINT (note, 0);
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  /* Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
3222
5f0d2358
RK
3223/* Search all basic blocks for potentially dead edges and purge them. Return
3224 true if some edge has been eliminated. */
ca6c03ca
JH
3225
3226bool
25cd19de 3227purge_all_dead_edges (void)
ca6c03ca 3228{
e0082a72 3229 int purged = false;
e0082a72 3230 basic_block bb;
473fb060 3231
11cd3bed 3232 FOR_EACH_BB_FN (bb, cfun)
473fb060 3233 {
e0082a72 3234 bool purged_here = purge_dead_edges (bb);
5f0d2358 3235
473fb060 3236 purged |= purged_here;
473fb060 3237 }
5f0d2358 3238
ca6c03ca
JH
3239 return purged;
3240}
9ee634e3 3241
ba5e9aca
EB
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the block's end insn is no longer the call/trapping
	 insn that the abnormal edge belongs to.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block right after the call/trapping insn;
	         everything from there to STOP gets moved or deleted.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
78bde837
SB
3330\f
3331/* Cut the insns from FIRST to LAST out of the insns stream. */
3332
a90c614a 3333rtx_insn *
9152e0aa 3334unlink_insn_chain (rtx_insn *first, rtx_insn *last)
78bde837 3335{
a90c614a
DM
3336 rtx_insn *prevfirst = PREV_INSN (first);
3337 rtx_insn *nextlast = NEXT_INSN (last);
78bde837 3338
0f82e5c9
DM
3339 SET_PREV_INSN (first) = NULL;
3340 SET_NEXT_INSN (last) = NULL;
78bde837 3341 if (prevfirst)
0f82e5c9 3342 SET_NEXT_INSN (prevfirst) = nextlast;
78bde837 3343 if (nextlast)
0f82e5c9 3344 SET_PREV_INSN (nextlast) = prevfirst;
78bde837
SB
3345 else
3346 set_last_insn (prevfirst);
3347 if (!prevfirst)
3348 set_first_insn (nextlast);
9152e0aa 3349 return first;
78bde837
SB
3350}
3351\f
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx_insn *
skip_insns_after_block (basic_block bb)
{
  rtx_insn *insn, *last_insn, *next_head, *prev;

  /* NEXT_HEAD marks where the next block begins; never scan past it.  */
  next_head = NULL;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers belong to the preceding block; keep scanning.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Other notes are skipped but not claimed for BB.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by jump table data stays with
	     the preceding block; absorb both.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
3435
3436/* Locate or create a label for a given basic block. */
3437
e67d1102 3438static rtx_insn *
78bde837
SB
3439label_for_bb (basic_block bb)
3440{
e67d1102 3441 rtx_insn *label = BB_HEAD (bb);
78bde837
SB
3442
3443 if (!LABEL_P (label))
3444 {
3445 if (dump_file)
3446 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3447
3448 label = block_label (bb);
3449 }
3450
3451 return label;
3452}
3453
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Insns outside any
   block are cut out of the chain and stashed in the function header/footer
   or per-block header/footer fields.  */

static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  /* Find the first basic-block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything before the first block becomes the function header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      /* Insns between the previous block's end and this block's head
	 become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      /* Trailing insns associated with the block (e.g. barriers) become
	 its footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever remains after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3497\f
27a4cd48
DM
/* Pass machinery for switching the function into cfglayout mode.  */

namespace {

const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    /* Enter cfglayout mode; no extra flags requested.  */
    cfg_layout_initialize (0);
    return 0;
  }

}; // class pass_into_cfg_layout_mode

} // anon namespace

/* Factory function used by the pass manager.  */

rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3536
/* Pass machinery for switching the function back out of cfglayout mode.  */

namespace {

const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  /* Record the current layout order in the aux fields, as expected by
     cfg_layout_finalize.  */
  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace

/* Factory function used by the pass manager.  */

rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
78bde837
SB
3585\f
3586
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: follow the aux chain and rewrite the
     prev_bb/next_bb links to match it.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     are going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3660\f
3661
3662/* Given a reorder chain, rearrange the code to match. */
3663
3664static void
3665fixup_reorder_chain (void)
3666{
3667 basic_block bb;
9152e0aa 3668 rtx_insn *insn = NULL;
78bde837
SB
3669
3670 if (cfg_layout_function_header)
3671 {
3672 set_first_insn (cfg_layout_function_header);
3673 insn = cfg_layout_function_header;
3674 while (NEXT_INSN (insn))
3675 insn = NEXT_INSN (insn);
3676 }
3677
3678 /* First do the bulk reordering -- rechain the blocks without regard to
3679 the needed changes to jumps and labels. */
3680
fefa31b5
DM
3681 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3682 bb->aux)
78bde837
SB
3683 {
3684 if (BB_HEADER (bb))
3685 {
3686 if (insn)
0f82e5c9 3687 SET_NEXT_INSN (insn) = BB_HEADER (bb);
78bde837
SB
3688 else
3689 set_first_insn (BB_HEADER (bb));
0f82e5c9 3690 SET_PREV_INSN (BB_HEADER (bb)) = insn;
78bde837
SB
3691 insn = BB_HEADER (bb);
3692 while (NEXT_INSN (insn))
3693 insn = NEXT_INSN (insn);
3694 }
3695 if (insn)
0f82e5c9 3696 SET_NEXT_INSN (insn) = BB_HEAD (bb);
78bde837
SB
3697 else
3698 set_first_insn (BB_HEAD (bb));
0f82e5c9 3699 SET_PREV_INSN (BB_HEAD (bb)) = insn;
78bde837
SB
3700 insn = BB_END (bb);
3701 if (BB_FOOTER (bb))
3702 {
0f82e5c9
DM
3703 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3704 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
78bde837
SB
3705 while (NEXT_INSN (insn))
3706 insn = NEXT_INSN (insn);
3707 }
3708 }
3709
0f82e5c9 3710 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
78bde837 3711 if (cfg_layout_function_footer)
0f82e5c9 3712 SET_PREV_INSN (cfg_layout_function_footer) = insn;
78bde837
SB
3713
3714 while (NEXT_INSN (insn))
3715 insn = NEXT_INSN (insn);
3716
3717 set_last_insn (insn);
b2b29377
MM
3718 if (flag_checking)
3719 verify_insn_chain ();
78bde837
SB
3720
3721 /* Now add jumps and labels as needed to match the blocks new
29f3fd5b 3722 outgoing edges. */
78bde837 3723
29f3fd5b
SB
3724 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3725 bb->aux)
78bde837
SB
3726 {
3727 edge e_fall, e_taken, e;
8879d71c 3728 rtx_insn *bb_end_insn;
78bde837 3729 rtx ret_label = NULL_RTX;
3371a64f 3730 basic_block nb;
78bde837
SB
3731 edge_iterator ei;
3732
3733 if (EDGE_COUNT (bb->succs) == 0)
3734 continue;
3735
3736 /* Find the old fallthru edge, and another non-EH edge for
3737 a taken jump. */
3738 e_taken = e_fall = NULL;
3739
3740 FOR_EACH_EDGE (e, ei, bb->succs)
3741 if (e->flags & EDGE_FALLTHRU)
3742 e_fall = e;
3743 else if (! (e->flags & EDGE_EH))
3744 e_taken = e;
3745
3746 bb_end_insn = BB_END (bb);
1476d1bd 3747 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
78bde837 3748 {
1476d1bd
MM
3749 ret_label = JUMP_LABEL (bb_end_jump);
3750 if (any_condjump_p (bb_end_jump))
78bde837
SB
3751 {
3752 /* This might happen if the conditional jump has side
3753 effects and could therefore not be optimized away.
3754 Make the basic block to end with a barrier in order
3755 to prevent rtl_verify_flow_info from complaining. */
3756 if (!e_fall)
3757 {
1476d1bd
MM
3758 gcc_assert (!onlyjump_p (bb_end_jump)
3759 || returnjump_p (bb_end_jump)
91af97c3 3760 || (e_taken->flags & EDGE_CROSSING));
1476d1bd 3761 emit_barrier_after (bb_end_jump);
78bde837
SB
3762 continue;
3763 }
3764
3765 /* If the old fallthru is still next, nothing to do. */
3766 if (bb->aux == e_fall->dest
fefa31b5 3767 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3768 continue;
3769
3770 /* The degenerated case of conditional jump jumping to the next
3771 instruction can happen for jumps with side effects. We need
3772 to construct a forwarder block and this will be done just
3773 fine by force_nonfallthru below. */
3774 if (!e_taken)
3775 ;
3776
3777 /* There is another special case: if *neither* block is next,
3778 such as happens at the very end of a function, then we'll
3779 need to add a new unconditional jump. Choose the taken
3780 edge based on known or assumed probability. */
3781 else if (bb->aux != e_taken->dest)
3782 {
1476d1bd 3783 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
78bde837
SB
3784
3785 if (note
e5af9ddd 3786 && XINT (note, 0) < REG_BR_PROB_BASE / 2
1476d1bd 3787 && invert_jump (bb_end_jump,
fefa31b5
DM
3788 (e_fall->dest
3789 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3790 ? NULL_RTX
3791 : label_for_bb (e_fall->dest)), 0))
3792 {
3793 e_fall->flags &= ~EDGE_FALLTHRU;
3794 gcc_checking_assert (could_fall_through
3795 (e_taken->src, e_taken->dest));
3796 e_taken->flags |= EDGE_FALLTHRU;
3797 update_br_prob_note (bb);
3798 e = e_fall, e_fall = e_taken, e_taken = e;
3799 }
3800 }
3801
3802 /* If the "jumping" edge is a crossing edge, and the fall
3803 through edge is non-crossing, leave things as they are. */
3804 else if ((e_taken->flags & EDGE_CROSSING)
3805 && !(e_fall->flags & EDGE_CROSSING))
3806 continue;
3807
3808 /* Otherwise we can try to invert the jump. This will
3809 basically never fail, however, keep up the pretense. */
1476d1bd 3810 else if (invert_jump (bb_end_jump,
fefa31b5
DM
3811 (e_fall->dest
3812 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3813 ? NULL_RTX
3814 : label_for_bb (e_fall->dest)), 0))
3815 {
3816 e_fall->flags &= ~EDGE_FALLTHRU;
3817 gcc_checking_assert (could_fall_through
3818 (e_taken->src, e_taken->dest));
3819 e_taken->flags |= EDGE_FALLTHRU;
3820 update_br_prob_note (bb);
3821 if (LABEL_NUSES (ret_label) == 0
3822 && single_pred_p (e_taken->dest))
3823 delete_insn (ret_label);
3824 continue;
3825 }
3826 }
3827 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3828 {
3829 /* If the old fallthru is still next or if
3830 asm goto doesn't have a fallthru (e.g. when followed by
3831 __builtin_unreachable ()), nothing to do. */
3832 if (! e_fall
3833 || bb->aux == e_fall->dest
fefa31b5 3834 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3835 continue;
3836
3837 /* Otherwise we'll have to use the fallthru fixup below. */
3838 }
3839 else
3840 {
3841 /* Otherwise we have some return, switch or computed
3842 jump. In the 99% case, there should not have been a
3843 fallthru edge. */
3844 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3845 continue;
3846 }
3847 }
3848 else
3849 {
3850 /* No fallthru implies a noreturn function with EH edges, or
3851 something similarly bizarre. In any case, we don't need to
3852 do anything. */
3853 if (! e_fall)
3854 continue;
3855
3856 /* If the fallthru block is still next, nothing to do. */
3857 if (bb->aux == e_fall->dest)
3858 continue;
3859
3860 /* A fallthru to exit block. */
fefa31b5 3861 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3862 continue;
3863 }
3864
3865 /* We got here if we need to add a new jump insn.
3866 Note force_nonfallthru can delete E_FALL and thus we have to
3867 save E_FALL->src prior to the call to force_nonfallthru. */
78bde837
SB
3868 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3869 if (nb)
3870 {
3871 nb->aux = bb->aux;
3872 bb->aux = nb;
3873 /* Don't process this new block. */
3874 bb = nb;
78bde837
SB
3875 }
3876 }
3877
3878 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3879
29f3fd5b 3880 /* Annoying special case - jump around dead jumptables left in the code. */
11cd3bed 3881 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3882 {
3883 edge e = find_fallthru_edge (bb->succs);
3884
29f3fd5b
SB
3885 if (e && !can_fallthru (e->src, e->dest))
3886 force_nonfallthru (e);
78bde837
SB
3887 }
3888
3889 /* Ensure goto_locus from edges has some instructions with that locus
3890 in RTL. */
3891 if (!optimize)
11cd3bed 3892 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3893 {
3894 edge e;
3895 edge_iterator ei;
3896
3897 FOR_EACH_EDGE (e, ei, bb->succs)
2f13f2de 3898 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
5368224f 3899 && !(e->flags & EDGE_ABNORMAL))
78bde837
SB
3900 {
3901 edge e2;
3902 edge_iterator ei2;
3903 basic_block dest, nb;
8879d71c 3904 rtx_insn *end;
78bde837
SB
3905
3906 insn = BB_END (e->src);
3907 end = PREV_INSN (BB_HEAD (e->src));
3908 while (insn != end
5368224f 3909 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
78bde837
SB
3910 insn = PREV_INSN (insn);
3911 if (insn != end
5368224f 3912 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3913 continue;
3914 if (simplejump_p (BB_END (e->src))
5368224f 3915 && !INSN_HAS_LOCATION (BB_END (e->src)))
78bde837 3916 {
5368224f 3917 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
78bde837
SB
3918 continue;
3919 }
3920 dest = e->dest;
fefa31b5 3921 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3922 {
3923 /* Non-fallthru edges to the exit block cannot be split. */
3924 if (!(e->flags & EDGE_FALLTHRU))
3925 continue;
3926 }
3927 else
3928 {
3929 insn = BB_HEAD (dest);
3930 end = NEXT_INSN (BB_END (dest));
3931 while (insn != end && !NONDEBUG_INSN_P (insn))
3932 insn = NEXT_INSN (insn);
5368224f
DC
3933 if (insn != end && INSN_HAS_LOCATION (insn)
3934 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3935 continue;
3936 }
3937 nb = split_edge (e);
3938 if (!INSN_P (BB_END (nb)))
1130d5e3 3939 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
190bea87 3940 nb);
5368224f 3941 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
78bde837
SB
3942
3943 /* If there are other incoming edges to the destination block
3944 with the same goto locus, redirect them to the new block as
3945 well, this can prevent other such blocks from being created
3946 in subsequent iterations of the loop. */
3947 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
2f13f2de 3948 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
78bde837 3949 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
5368224f 3950 && e->goto_locus == e2->goto_locus)
78bde837
SB
3951 redirect_edge_and_branch (e2, nb);
3952 else
3953 ei_next (&ei2);
3954 }
3955 }
3956}
3957\f
3958/* Perform sanity checks on the insn chain.
3959 1. Check that next/prev pointers are consistent in both the forward and
3960 reverse direction.
3961 2. Count insns in chain, going both directions, and check if equal.
3962 3. Check that get_last_insn () returns the actual end of chain. */
3963
3964DEBUG_FUNCTION void
3965verify_insn_chain (void)
3966{
8879d71c 3967 rtx_insn *x, *prevx, *nextx;
78bde837
SB
3968 int insn_cnt1, insn_cnt2;
3969
3970 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3971 x != 0;
3972 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3973 gcc_assert (PREV_INSN (x) == prevx);
3974
3975 gcc_assert (prevx == get_last_insn ());
3976
3977 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3978 x != 0;
3979 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3980 gcc_assert (NEXT_INSN (x) == nextx);
3981
3982 gcc_assert (insn_cnt1 == insn_cnt2);
3983}
3984\f
3985/* If we have assembler epilogues, the block falling through to exit must
3986 be the last one in the reordered chain when we reach final. Ensure
3987 that this condition is met. */
3988static void
3989fixup_fallthru_exit_predecessor (void)
3990{
3991 edge e;
3992 basic_block bb = NULL;
3993
3994 /* This transformation is not valid before reload, because we might
3995 separate a call from the instruction that copies the return
3996 value. */
3997 gcc_assert (reload_completed);
3998
fefa31b5 3999 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
78bde837
SB
4000 if (e)
4001 bb = e->src;
4002
4003 if (bb && bb->aux)
4004 {
fefa31b5 4005 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
78bde837
SB
4006
4007 /* If the very first block is the one with the fall-through exit
4008 edge, we have to split that block. */
4009 if (c == bb)
4010 {
c4d281b2 4011 bb = split_block_after_labels (bb)->dest;
78bde837
SB
4012 bb->aux = c->aux;
4013 c->aux = bb;
d8ce2eae
DM
4014 BB_FOOTER (bb) = BB_FOOTER (c);
4015 BB_FOOTER (c) = NULL;
78bde837
SB
4016 }
4017
4018 while (c->aux != bb)
4019 c = (basic_block) c->aux;
4020
4021 c->aux = bb->aux;
4022 while (c->aux)
4023 c = (basic_block) c->aux;
4024
4025 c->aux = bb;
4026 bb->aux = NULL;
4027 }
4028}
4029
4030/* In case there are more than one fallthru predecessors of exit, force that
4031 there is only one. */
4032
4033static void
4034force_one_exit_fallthru (void)
4035{
4036 edge e, predecessor = NULL;
4037 bool more = false;
4038 edge_iterator ei;
4039 basic_block forwarder, bb;
4040
fefa31b5 4041 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
78bde837
SB
4042 if (e->flags & EDGE_FALLTHRU)
4043 {
4044 if (predecessor == NULL)
4045 predecessor = e;
4046 else
4047 {
4048 more = true;
4049 break;
4050 }
4051 }
4052
4053 if (!more)
4054 return;
4055
4056 /* Exit has several fallthru predecessors. Create a forwarder block for
4057 them. */
4058 forwarder = split_edge (predecessor);
fefa31b5
DM
4059 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4060 (e = ei_safe_edge (ei)); )
78bde837
SB
4061 {
4062 if (e->src == forwarder
4063 || !(e->flags & EDGE_FALLTHRU))
4064 ei_next (&ei);
4065 else
4066 redirect_edge_and_branch_force (e, forwarder);
4067 }
4068
4069 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4070 exit block. */
11cd3bed 4071 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4072 {
4073 if (bb->aux == NULL && bb != forwarder)
4074 {
4075 bb->aux = forwarder;
4076 break;
4077 }
4078 }
4079}
4080\f
4081/* Return true in case it is possible to duplicate the basic block BB. */
4082
4083static bool
4084cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4085{
4086 /* Do not attempt to duplicate tablejumps, as we need to unshare
4087 the dispatch table. This is difficult to do, as the instructions
4088 computing jump destination may be hoisted outside the basic block. */
4089 if (tablejump_p (BB_END (bb), NULL, NULL))
4090 return false;
4091
4092 /* Do not duplicate blocks containing insns that can't be copied. */
4093 if (targetm.cannot_copy_insn_p)
4094 {
8879d71c 4095 rtx_insn *insn = BB_HEAD (bb);
78bde837
SB
4096 while (1)
4097 {
4098 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4099 return false;
4100 if (insn == BB_END (bb))
4101 break;
4102 insn = NEXT_INSN (insn);
4103 }
4104 }
4105
4106 return true;
4107}
4108
a90c614a 4109rtx_insn *
5d34b050 4110duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
78bde837 4111{
5d34b050 4112 rtx_insn *insn, *next, *copy;
66e8df53 4113 rtx_note *last;
78bde837
SB
4114
4115 /* Avoid updating of boundaries of previous basic block. The
4116 note will get removed from insn stream in fixup. */
4117 last = emit_note (NOTE_INSN_DELETED);
4118
4119 /* Create copy at the end of INSN chain. The chain will
4120 be reordered later. */
4121 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4122 {
4123 switch (GET_CODE (insn))
4124 {
4125 case DEBUG_INSN:
4126 /* Don't duplicate label debug insns. */
4127 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4128 break;
4129 /* FALLTHRU */
4130 case INSN:
4131 case CALL_INSN:
4132 case JUMP_INSN:
78bde837
SB
4133 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4134 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4135 && ANY_RETURN_P (JUMP_LABEL (insn)))
4136 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4137 maybe_copy_prologue_epilogue_insn (insn, copy);
4138 break;
4139
39718607
SB
4140 case JUMP_TABLE_DATA:
4141 /* Avoid copying of dispatch tables. We never duplicate
4142 tablejumps, so this can hit only in case the table got
4143 moved far from original jump.
4144 Avoid copying following barrier as well if any
4145 (and debug insns in between). */
4146 for (next = NEXT_INSN (insn);
4147 next != NEXT_INSN (to);
4148 next = NEXT_INSN (next))
4149 if (!DEBUG_INSN_P (next))
4150 break;
4151 if (next != NEXT_INSN (to) && BARRIER_P (next))
4152 insn = next;
4153 break;
4154
78bde837
SB
4155 case CODE_LABEL:
4156 break;
4157
4158 case BARRIER:
4159 emit_barrier ();
4160 break;
4161
4162 case NOTE:
4163 switch (NOTE_KIND (insn))
4164 {
4165 /* In case prologue is empty and function contain label
4166 in first BB, we may want to copy the block. */
4167 case NOTE_INSN_PROLOGUE_END:
4168
4169 case NOTE_INSN_DELETED:
4170 case NOTE_INSN_DELETED_LABEL:
4171 case NOTE_INSN_DELETED_DEBUG_LABEL:
4172 /* No problem to strip these. */
4173 case NOTE_INSN_FUNCTION_BEG:
4174 /* There is always just single entry to function. */
4175 case NOTE_INSN_BASIC_BLOCK:
3371a64f
TJ
4176 /* We should only switch text sections once. */
4177 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
78bde837
SB
4178 break;
4179
4180 case NOTE_INSN_EPILOGUE_BEG:
d33606c3 4181 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
66e8df53 4182 emit_note_copy (as_a <rtx_note *> (insn));
78bde837
SB
4183 break;
4184
4185 default:
4186 /* All other notes should have already been eliminated. */
4187 gcc_unreachable ();
4188 }
4189 break;
4190 default:
4191 gcc_unreachable ();
4192 }
4193 }
4194 insn = NEXT_INSN (last);
4195 delete_insn (last);
5d34b050 4196 return insn;
78bde837
SB
4197}
4198
4199/* Create a duplicate of the basic block BB. */
4200
4201static basic_block
4202cfg_layout_duplicate_bb (basic_block bb)
4203{
8879d71c 4204 rtx_insn *insn;
78bde837
SB
4205 basic_block new_bb;
4206
4207 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4208 new_bb = create_basic_block (insn,
4209 insn ? get_last_insn () : NULL,
fefa31b5 4210 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
78bde837
SB
4211
4212 BB_COPY_PARTITION (new_bb, bb);
4213 if (BB_HEADER (bb))
4214 {
4215 insn = BB_HEADER (bb);
4216 while (NEXT_INSN (insn))
4217 insn = NEXT_INSN (insn);
4218 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4219 if (insn)
1130d5e3 4220 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
78bde837
SB
4221 }
4222
4223 if (BB_FOOTER (bb))
4224 {
4225 insn = BB_FOOTER (bb);
4226 while (NEXT_INSN (insn))
4227 insn = NEXT_INSN (insn);
4228 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4229 if (insn)
d8ce2eae 4230 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
78bde837
SB
4231 }
4232
4233 return new_bb;
4234}
4235
4236\f
4237/* Main entry point to this module - initialize the datastructures for
4238 CFG layout changes. It keeps LOOPS up-to-date if not null.
4239
4240 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4241
4242void
4243cfg_layout_initialize (unsigned int flags)
4244{
b5241a5a 4245 rtx_insn_list *x;
78bde837
SB
4246 basic_block bb;
4247
8a9e6b45
BS
4248 /* Once bb partitioning is complete, cfg layout mode should not be
4249 re-entered. Entering cfg layout mode may require fixups. As an
4250 example, if edge forwarding performed when optimizing the cfg
4251 layout required moving a block from the hot to the cold
4252 section. This would create an illegal partitioning unless some
4253 manual fixup was performed. */
4254 gcc_assert (!(crtl->bb_reorder_complete
4255 && flag_reorder_blocks_and_partition));
4ca19309 4256
78bde837
SB
4257 initialize_original_copy_tables ();
4258
4259 cfg_layout_rtl_register_cfg_hooks ();
4260
4261 record_effective_endpoints ();
4262
4263 /* Make sure that the targets of non local gotos are marked. */
2382940b 4264 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
78bde837 4265 {
b5241a5a 4266 bb = BLOCK_FOR_INSN (x->insn ());
78bde837
SB
4267 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4268 }
4269
4270 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4271}
4272
4273/* Splits superblocks. */
4274void
4275break_superblocks (void)
4276{
78bde837
SB
4277 bool need = false;
4278 basic_block bb;
4279
7ba9e72d 4280 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
f61e445a 4281 bitmap_clear (superblocks);
78bde837 4282
11cd3bed 4283 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4284 if (bb->flags & BB_SUPERBLOCK)
4285 {
4286 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4287 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4288 need = true;
4289 }
4290
4291 if (need)
4292 {
4293 rebuild_jump_labels (get_insns ());
4294 find_many_sub_basic_blocks (superblocks);
4295 }
78bde837
SB
4296}
4297
4298/* Finalize the changes: reorder insn list according to the sequence specified
29f3fd5b 4299 by aux pointers, enter compensation code, rebuild scope forest. */
78bde837
SB
4300
4301void
4302cfg_layout_finalize (void)
4303{
b2b29377 4304 checking_verify_flow_info ();
63da5ff6 4305 free_dominance_info (CDI_DOMINATORS);
78bde837
SB
4306 force_one_exit_fallthru ();
4307 rtl_register_cfg_hooks ();
e86a9946 4308 if (reload_completed && !targetm.have_epilogue ())
78bde837
SB
4309 fixup_fallthru_exit_predecessor ();
4310 fixup_reorder_chain ();
4311
29f3fd5b
SB
4312 rebuild_jump_labels (get_insns ());
4313 delete_dead_jumptables ();
4314
b2b29377
MM
4315 if (flag_checking)
4316 verify_insn_chain ();
4317 checking_verify_flow_info ();
78bde837
SB
4318}
4319
ba5e9aca 4320
9ee634e3 4321/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4322
4323static basic_block
d329e058 4324cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4325{
ae50c0cb 4326 rtx insn = (rtx) insnp;
f470c378 4327 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4328
d8ce2eae
DM
4329 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4330 BB_FOOTER (bb) = NULL;
9ee634e3 4331
f470c378 4332 return new_bb;
9ee634e3
JH
4333}
4334
9ee634e3 4335/* Redirect Edge to DEST. */
6de9cd9a 4336static edge
d329e058 4337cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
9ee634e3
JH
4338{
4339 basic_block src = e->src;
6de9cd9a 4340 edge ret;
9ee634e3 4341
bc35512f 4342 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
6de9cd9a 4343 return NULL;
bc35512f 4344
3348b696 4345 if (e->dest == dest)
6de9cd9a 4346 return e;
bc35512f 4347
fefa31b5 4348 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
6de9cd9a 4349 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
f345f21a 4350 {
6fb5fa3c 4351 df_set_bb_dirty (src);
6de9cd9a 4352 return ret;
f345f21a 4353 }
bc35512f 4354
fefa31b5 4355 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
bc35512f
JH
4356 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4357 {
c263766c
RH
4358 if (dump_file)
4359 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
bc35512f
JH
4360 e->src->index, dest->index);
4361
6fb5fa3c 4362 df_set_bb_dirty (e->src);
bc35512f 4363 redirect_edge_succ (e, dest);
6de9cd9a 4364 return e;
bc35512f
JH
4365 }
4366
9ee634e3
JH
4367 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4368 in the case the basic block appears to be in sequence. Avoid this
4369 transformation. */
4370
9ee634e3
JH
4371 if (e->flags & EDGE_FALLTHRU)
4372 {
4373 /* Redirect any branch edges unified with the fallthru one. */
4b4bf941 4374 if (JUMP_P (BB_END (src))
432f982f
JH
4375 && label_is_jump_target_p (BB_HEAD (e->dest),
4376 BB_END (src)))
9ee634e3 4377 {
341c100f 4378 edge redirected;
c22cacf3 4379
c263766c
RH
4380 if (dump_file)
4381 fprintf (dump_file, "Fallthru edge unified with branch "
432f982f
JH
4382 "%i->%i redirected to %i\n",
4383 e->src->index, e->dest->index, dest->index);
4384 e->flags &= ~EDGE_FALLTHRU;
341c100f
NS
4385 redirected = redirect_branch_edge (e, dest);
4386 gcc_assert (redirected);
0c617be4
JL
4387 redirected->flags |= EDGE_FALLTHRU;
4388 df_set_bb_dirty (redirected->src);
4389 return redirected;
9ee634e3
JH
4390 }
4391 /* In case we are redirecting fallthru edge to the branch edge
c22cacf3 4392 of conditional jump, remove it. */
628f6a4e 4393 if (EDGE_COUNT (src->succs) == 2)
9ee634e3 4394 {
03101c6f
KH
4395 /* Find the edge that is different from E. */
4396 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
628f6a4e 4397
9ee634e3 4398 if (s->dest == dest
a813c111
SB
4399 && any_condjump_p (BB_END (src))
4400 && onlyjump_p (BB_END (src)))
4401 delete_insn (BB_END (src));
9ee634e3 4402 }
c263766c 4403 if (dump_file)
dc764d10 4404 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
bc35512f 4405 e->src->index, e->dest->index, dest->index);
0c617be4 4406 ret = redirect_edge_succ_nodup (e, dest);
9ee634e3
JH
4407 }
4408 else
bc35512f 4409 ret = redirect_branch_edge (e, dest);
9ee634e3
JH
4410
4411 /* We don't want simplejumps in the insn stream during cfglayout. */
341c100f 4412 gcc_assert (!simplejump_p (BB_END (src)));
9ee634e3 4413
6fb5fa3c 4414 df_set_bb_dirty (src);
9ee634e3
JH
4415 return ret;
4416}
4417
4418/* Simple wrapper as we always can redirect fallthru edges. */
4419static basic_block
d329e058 4420cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4421{
341c100f
NS
4422 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4423
4424 gcc_assert (redirected);
9ee634e3
JH
4425 return NULL;
4426}
4427
f470c378
ZD
4428/* Same as delete_basic_block but update cfg_layout structures. */
4429
9ee634e3 4430static void
d329e058 4431cfg_layout_delete_block (basic_block bb)
9ee634e3 4432{
8879d71c 4433 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
1130d5e3 4434 rtx_insn **to;
9ee634e3 4435
bcc708fc 4436 if (BB_HEADER (bb))
9ee634e3 4437 {
a813c111 4438 next = BB_HEAD (bb);
9ee634e3 4439 if (prev)
0f82e5c9 4440 SET_NEXT_INSN (prev) = BB_HEADER (bb);
9ee634e3 4441 else
bcc708fc 4442 set_first_insn (BB_HEADER (bb));
0f82e5c9 4443 SET_PREV_INSN (BB_HEADER (bb)) = prev;
bcc708fc 4444 insn = BB_HEADER (bb);
9ee634e3
JH
4445 while (NEXT_INSN (insn))
4446 insn = NEXT_INSN (insn);
0f82e5c9
DM
4447 SET_NEXT_INSN (insn) = next;
4448 SET_PREV_INSN (next) = insn;
9ee634e3 4449 }
a813c111 4450 next = NEXT_INSN (BB_END (bb));
bcc708fc 4451 if (BB_FOOTER (bb))
9ee634e3 4452 {
bcc708fc 4453 insn = BB_FOOTER (bb);
bc35512f
JH
4454 while (insn)
4455 {
4b4bf941 4456 if (BARRIER_P (insn))
bc35512f
JH
4457 {
4458 if (PREV_INSN (insn))
0f82e5c9 4459 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
bc35512f 4460 else
d8ce2eae 4461 BB_FOOTER (bb) = NEXT_INSN (insn);
bc35512f 4462 if (NEXT_INSN (insn))
0f82e5c9 4463 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
bc35512f 4464 }
4b4bf941 4465 if (LABEL_P (insn))
bc35512f
JH
4466 break;
4467 insn = NEXT_INSN (insn);
4468 }
bcc708fc 4469 if (BB_FOOTER (bb))
bc35512f 4470 {
a813c111 4471 insn = BB_END (bb);
0f82e5c9
DM
4472 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4473 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
bc35512f
JH
4474 while (NEXT_INSN (insn))
4475 insn = NEXT_INSN (insn);
0f82e5c9 4476 SET_NEXT_INSN (insn) = next;
bc35512f 4477 if (next)
0f82e5c9 4478 SET_PREV_INSN (next) = insn;
bc35512f
JH
4479 else
4480 set_last_insn (insn);
4481 }
9ee634e3 4482 }
fefa31b5 4483 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1130d5e3 4484 to = &BB_HEADER (bb->next_bb);
9ee634e3
JH
4485 else
4486 to = &cfg_layout_function_footer;
997de8ed 4487
9ee634e3
JH
4488 rtl_delete_block (bb);
4489
4490 if (prev)
4491 prev = NEXT_INSN (prev);
d329e058 4492 else
9ee634e3
JH
4493 prev = get_insns ();
4494 if (next)
4495 next = PREV_INSN (next);
d329e058 4496 else
9ee634e3
JH
4497 next = get_last_insn ();
4498
4499 if (next && NEXT_INSN (next) != prev)
4500 {
4501 remaints = unlink_insn_chain (prev, next);
4502 insn = remaints;
4503 while (NEXT_INSN (insn))
4504 insn = NEXT_INSN (insn);
0f82e5c9 4505 SET_NEXT_INSN (insn) = *to;
9ee634e3 4506 if (*to)
0f82e5c9 4507 SET_PREV_INSN (*to) = insn;
9ee634e3
JH
4508 *to = remaints;
4509 }
4510}
4511
beb235f8 4512/* Return true when blocks A and B can be safely merged. */
b48d0358 4513
bc35512f 4514static bool
b48d0358 4515cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4516{
750054a2
CT
4517 /* If we are partitioning hot/cold basic blocks, we don't want to
4518 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4519 and cold sections.
4520
8e8d5162 4521 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4522 be optimizable (or blocks that appear to be mergeable), but which really
4523 must be left untouched (they are required to make it safely across
4524 partition boundaries). See the comments at the top of
8e8d5162
CT
4525 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4526
87c8b4be 4527 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4528 return false;
750054a2 4529
7d776ee2
RG
4530 /* Protect the loop latches. */
4531 if (current_loops && b->loop_father->latch == b)
4532 return false;
4533
894a84b5
BS
4534 /* If we would end up moving B's instructions, make sure it doesn't fall
4535 through into the exit block, since we cannot recover from a fallthrough
4536 edge into the exit block occurring in the middle of a function. */
4537 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4538 {
4539 edge e = find_fallthru_edge (b->succs);
fefa31b5 4540 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4541 return false;
4542 }
4543
bc35512f 4544 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4545 return (single_succ_p (a)
4546 && single_succ (a) == b
4547 && single_pred_p (b) == 1
628f6a4e 4548 && a != b
bc35512f 4549 /* Must be simple edge. */
c5cbcccf 4550 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4551 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4552 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4553 /* If the jump insn has side effects, we can't kill the edge.
4554 When not optimizing, try_redirect_by_replacing_jump will
4555 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4556 && (!JUMP_P (BB_END (a))
49ea3702 4557 || ((!optimize || reload_completed)
a813c111 4558 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4559}
4560
41806d92
NS
4561/* Merge block A and B. The blocks must be mergeable. */
4562
bc35512f
JH
4563static void
4564cfg_layout_merge_blocks (basic_block a, basic_block b)
4565{
50a36e42 4566 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
8879d71c 4567 rtx_insn *insn;
50a36e42 4568
77a74ed7 4569 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
bc35512f 4570
6fb5fa3c 4571 if (dump_file)
50a36e42
EB
4572 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4573 a->index);
6fb5fa3c 4574
bc35512f 4575 /* If there was a CODE_LABEL beginning B, delete it. */
4b4bf941 4576 if (LABEL_P (BB_HEAD (b)))
2c97f8e4 4577 {
2c97f8e4
RH
4578 delete_insn (BB_HEAD (b));
4579 }
bc35512f
JH
4580
4581 /* We should have fallthru edge in a, or we can do dummy redirection to get
4582 it cleaned up. */
4b4bf941 4583 if (JUMP_P (BB_END (a)))
628f6a4e 4584 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
341c100f 4585 gcc_assert (!JUMP_P (BB_END (a)));
bc35512f 4586
8a829274 4587 /* When not optimizing and the edge is the only place in RTL which holds
7241571e 4588 some unique locus, emit a nop with that locus in between. */
9be94227
EB
4589 if (!optimize)
4590 emit_nop_for_unique_locus_between (a, b);
7241571e 4591
4c8af858
SB
4592 /* Move things from b->footer after a->footer. */
4593 if (BB_FOOTER (b))
bc35512f 4594 {
4c8af858 4595 if (!BB_FOOTER (a))
d8ce2eae 4596 BB_FOOTER (a) = BB_FOOTER (b);
4c8af858
SB
4597 else
4598 {
8879d71c 4599 rtx_insn *last = BB_FOOTER (a);
4c8af858
SB
4600
4601 while (NEXT_INSN (last))
4602 last = NEXT_INSN (last);
0f82e5c9
DM
4603 SET_NEXT_INSN (last) = BB_FOOTER (b);
4604 SET_PREV_INSN (BB_FOOTER (b)) = last;
4c8af858 4605 }
d8ce2eae 4606 BB_FOOTER (b) = NULL;
4c8af858
SB
4607 }
4608
4609 /* Move things from b->header before a->footer.
4610 Note that this may include dead tablejump data, but we don't clean
4611 those up until we go out of cfglayout mode. */
4612 if (BB_HEADER (b))
4613 {
4614 if (! BB_FOOTER (a))
d8ce2eae 4615 BB_FOOTER (a) = BB_HEADER (b);
4c8af858
SB
4616 else
4617 {
8879d71c 4618 rtx_insn *last = BB_HEADER (b);
4c8af858
SB
4619
4620 while (NEXT_INSN (last))
4621 last = NEXT_INSN (last);
0f82e5c9
DM
4622 SET_NEXT_INSN (last) = BB_FOOTER (a);
4623 SET_PREV_INSN (BB_FOOTER (a)) = last;
d8ce2eae 4624 BB_FOOTER (a) = BB_HEADER (b);
4c8af858 4625 }
1130d5e3 4626 BB_HEADER (b) = NULL;
bc35512f
JH
4627 }
4628
4629 /* In the case basic blocks are not adjacent, move them around. */
a813c111 4630 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
bc35512f 4631 {
f9df6f16 4632 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
6c3d0e31 4633
f9df6f16 4634 emit_insn_after_noloc (insn, BB_END (a), a);
bc35512f
JH
4635 }
4636 /* Otherwise just re-associate the instructions. */
4637 else
4638 {
a813c111 4639 insn = BB_HEAD (b);
1130d5e3 4640 BB_END (a) = BB_END (b);
bc35512f
JH
4641 }
4642
f9df6f16
JJ
4643 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4644 We need to explicitly call. */
4645 update_bb_for_insn_chain (insn, BB_END (b), a);
4646
4647 /* Skip possible DELETED_LABEL insn. */
4648 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4649 insn = NEXT_INSN (insn);
4650 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
1130d5e3 4651 BB_HEAD (b) = BB_END (b) = NULL;
f9df6f16
JJ
4652 delete_insn (insn);
4653
6fb5fa3c
DB
4654 df_bb_delete (b->index);
4655
50a36e42 4656 /* If B was a forwarder block, propagate the locus on the edge. */
2f13f2de 4657 if (forwarder_p
fbc68f2a 4658 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
50a36e42
EB
4659 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4660
c263766c 4661 if (dump_file)
50a36e42 4662 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
bc35512f
JH
4663}
4664
4665/* Split edge E. */
f470c378 4666
bc35512f
JH
4667static basic_block
4668cfg_layout_split_edge (edge e)
4669{
bc35512f 4670 basic_block new_bb =
fefa31b5 4671 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4672 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4673 NULL_RTX, e->src);
4674
fefa31b5 4675 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4676 BB_COPY_PARTITION (new_bb, e->src);
4677 else
4678 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4679 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4680 redirect_edge_and_branch_force (e, new_bb);
4681
4682 return new_bb;
4683}
4684
f470c378
ZD
4685/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4686
4687static void
4688rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4689{
4690}
4691
df92c640
SB
4692/* Return true if BB contains only labels or non-executable
4693 instructions. */
4694
4695static bool
4696rtl_block_empty_p (basic_block bb)
4697{
8879d71c 4698 rtx_insn *insn;
df92c640 4699
fefa31b5
DM
4700 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4701 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4702 return true;
4703
4704 FOR_BB_INSNS (bb, insn)
4705 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4706 return false;
4707
4708 return true;
4709}
4710
4711/* Split a basic block if it ends with a conditional branch and if
4712 the other part of the block is not empty. */
4713
4714static basic_block
4715rtl_split_block_before_cond_jump (basic_block bb)
4716{
8879d71c
DM
4717 rtx_insn *insn;
4718 rtx_insn *split_point = NULL;
4719 rtx_insn *last = NULL;
df92c640
SB
4720 bool found_code = false;
4721
4722 FOR_BB_INSNS (bb, insn)
4723 {
4724 if (any_condjump_p (insn))
4725 split_point = last;
4726 else if (NONDEBUG_INSN_P (insn))
4727 found_code = true;
4728 last = insn;
4729 }
4730
4731 /* Did not find everything. */
4732 if (found_code && split_point)
4733 return split_block (bb, split_point)->dest;
4734 else
4735 return NULL;
4736}
4737
6de9cd9a
DN
4738/* Return 1 if BB ends with a call, possibly followed by some
4739 instructions that must stay with the call, 0 otherwise. */
4740
4741static bool
b48d0358 4742rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a 4743{
8879d71c 4744 rtx_insn *insn = BB_END (bb);
6de9cd9a 4745
4b4bf941 4746 while (!CALL_P (insn)
6de9cd9a 4747 && insn != BB_HEAD (bb)
92ddef69 4748 && (keep_with_call_p (insn)
b5b8b0ac
AO
4749 || NOTE_P (insn)
4750 || DEBUG_INSN_P (insn)))
6de9cd9a 4751 insn = PREV_INSN (insn);
4b4bf941 4752 return (CALL_P (insn));
6de9cd9a
DN
4753}
4754
4755/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4756
4757static bool
9678086d 4758rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4759{
4760 return any_condjump_p (BB_END (bb));
4761}
4762
4763/* Return true if we need to add fake edge to exit.
4764 Helper function for rtl_flow_call_edges_add. */
4765
4766static bool
8879d71c 4767need_fake_edge_p (const rtx_insn *insn)
6de9cd9a
DN
4768{
4769 if (!INSN_P (insn))
4770 return false;
4771
4b4bf941 4772 if ((CALL_P (insn)
6de9cd9a
DN
4773 && !SIBLING_CALL_P (insn)
4774 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4775 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4776 return true;
4777
4778 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4779 && MEM_VOLATILE_P (PATTERN (insn)))
4780 || (GET_CODE (PATTERN (insn)) == PARALLEL
4781 && asm_noperands (insn) != -1
4782 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4783 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4784}
4785
/* Add fake edges to the function exit for any non constant and non noreturn
   calls (and volatile inline assembly) in the bitmap of blocks specified by
   BLOCKS, or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for a CFG containing only the fixed entry/exit blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a (use (const_int 0)) on the fallthru edge to EXIT so
	     that the call is no longer the last insn of the block; the
	     edge insertion splits the block for us.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Index I may no longer name a block; splitting above/below can
	 leave gaps in the block array.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Scan the block backwards; PREV_INSN is sampled before any split
	 so the walk is unaffected by renumbering.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4910
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the block whose label becomes the target of the emitted conditional
   branch; the fall-through path continues to SECOND_HEAD.  There is no
   need to handle the fall-through here — the loop versioning code takes
   care of it — so SECOND_HEAD is unused; it is part of the signature
   because this hook must match the tree-level implementation.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;


  label = block_label (first_head);
  /* Take the comparison mode from OP0, falling back to OP1 when OP0 is
     modeless (e.g. a CONST_INT).  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the comparison and jump in a detached sequence, then splice
     it in after the current end of COND_BB.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  /* Final -1 argument: no branch probability is supplied — presumably
     "unknown"; TODO confirm against do_compare_rtx_and_jump's PROB
     parameter.  */
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond, in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
4947
4948
4949/* Given a block B with unconditional branch at its end, get the
4950 store the return the branch edge and the fall-thru edge in
4951 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4952static void
4953rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4954 edge *fallthru_edge)
4955{
4956 edge e = EDGE_SUCC (b, 0);
4957
4958 if (e->flags & EDGE_FALLTHRU)
4959 {
4960 *fallthru_edge = e;
4961 *branch_edge = EDGE_SUCC (b, 1);
4962 }
4963 else
4964 {
4965 *branch_edge = e;
4966 *fallthru_edge = EDGE_SUCC (b, 1);
4967 }
4968}
4969
5e2d947c
JH
4970void
4971init_rtl_bb_info (basic_block bb)
4972{
bcc708fc
MM
4973 gcc_assert (!bb->il.x.rtl);
4974 bb->il.x.head_ = NULL;
766090c2 4975 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5e2d947c
JH
4976}
4977
14fa2cc0
ZD
4978/* Returns true if it is possible to remove edge E by redirecting
4979 it to the destination of the other edge from E->src. */
4980
4981static bool
9678086d 4982rtl_can_remove_branch_p (const_edge e)
14fa2cc0 4983{
9678086d
KG
4984 const_basic_block src = e->src;
4985 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
8879d71c
DM
4986 const rtx_insn *insn = BB_END (src);
4987 rtx set;
14fa2cc0
ZD
4988
4989 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 4990 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
4991 return false;
4992
4993 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4994 return false;
4995
3371a64f 4996 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
4997 return false;
4998
4999 if (!onlyjump_p (insn)
5000 || tablejump_p (insn, NULL, NULL))
5001 return false;
5002
5003 set = single_set (insn);
5004 if (!set || side_effects_p (set))
5005 return false;
5006
5007 return true;
5008}
5009
ffe14686
AM
5010static basic_block
5011rtl_duplicate_bb (basic_block bb)
5012{
5013 bb = cfg_layout_duplicate_bb (bb);
5014 bb->aux = NULL;
5015 return bb;
5016}
5017
aa4723d7
SB
5018/* Do book-keeping of basic block BB for the profile consistency checker.
5019 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5020 then do post-pass accounting. Store the counting in RECORD. */
5021static void
5022rtl_account_profile_record (basic_block bb, int after_pass,
5023 struct profile_record *record)
5024{
8879d71c 5025 rtx_insn *insn;
aa4723d7
SB
5026 FOR_BB_INSNS (bb, insn)
5027 if (INSN_P (insn))
5028 {
5029 record->size[after_pass]
5030 += insn_rtx_cost (PATTERN (insn), false);
0a6a6ac9 5031 if (profile_status_for_fn (cfun) == PROFILE_READ)
aa4723d7
SB
5032 record->time[after_pass]
5033 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
0a6a6ac9 5034 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
aa4723d7
SB
5035 record->time[after_pass]
5036 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5037 }
5038}
5039
/* Implementation of CFG manipulation for linearized RTL.
   NOTE: this is a positional initializer for struct cfg_hooks — the
   order must match the field order declared in cfghooks.h.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",				/* name */
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks,			/* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
5077
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   NOTE: positional initializer — order must match struct cfg_hooks in
   cfghooks.h.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",			/* name */
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
78bde837
SB
5119
5120#include "gt-cfgrtl.h"