]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
re PR target/79041 (aarch64 backend emits R_AARCH64_ADR_PREL_PG_HI21 relocation despi...
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
ca6c03ca 1/* Control flow graph manipulation code for GNU compiler.
cbe34bb5 2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
ca6c03ca
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca6c03ca
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca6c03ca 19
5f0d2358
RK
20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
ca6c03ca
JH
22
23 Available functionality:
bc35512f 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5f0d2358 25 - CFG-aware instruction chain manipulation
ca6c03ca 26 delete_insn, delete_insn_chain
bc35512f
JH
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
ba5e9aca
EB
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
bc35512f
JH
33
34 Functions not supposed for generic use:
5f0d2358 35 - Infrastructure to determine quickly basic block for insn
ca6c03ca 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5f0d2358 37 - Edge redirection with updating and optimizing of insn chain
bc35512f 38 block_label, tidy_fallthru_edge, force_nonfallthru */
ca6c03ca
JH
39\f
40#include "config.h"
41#include "system.h"
4977bab6 42#include "coretypes.h"
c7131fb2 43#include "backend.h"
957060b5 44#include "target.h"
c7131fb2 45#include "rtl.h"
957060b5
AM
46#include "tree.h"
47#include "cfghooks.h"
c7131fb2 48#include "df.h"
957060b5 49#include "insn-config.h"
4d0cdd0c 50#include "memmodel.h"
957060b5 51#include "emit-rtl.h"
60393bbc
AM
52#include "cfgrtl.h"
53#include "cfganal.h"
54#include "cfgbuild.h"
55#include "cfgcleanup.h"
60393bbc 56#include "bb-reorder.h"
0cbd9993 57#include "rtl-error.h"
2a34bece 58#include "insn-attr.h"
36566b39 59#include "dojump.h"
ff25ef99 60#include "expr.h"
1cb7dfc3 61#include "cfgloop.h"
ef330312 62#include "tree-pass.h"
013a8899 63#include "print-rtl.h"
ca6c03ca 64
78bde837
SB
65/* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
1130d5e3 67static GTY(()) rtx_insn *cfg_layout_function_footer;
9152e0aa 68static GTY(()) rtx_insn *cfg_layout_function_header;
78bde837 69
8879d71c 70static rtx_insn *skip_insns_after_block (basic_block);
78bde837 71static void record_effective_endpoints (void);
78bde837
SB
72static void fixup_reorder_chain (void);
73
74void verify_insn_chain (void);
75static void fixup_fallthru_exit_predecessor (void);
8879d71c
DM
76static int can_delete_note_p (const rtx_note *);
77static int can_delete_label_p (const rtx_code_label *);
d329e058 78static basic_block rtl_split_edge (edge);
f470c378 79static bool rtl_move_block_after (basic_block, basic_block);
d329e058 80static int rtl_verify_flow_info (void);
f470c378 81static basic_block cfg_layout_split_block (basic_block, void *);
6de9cd9a 82static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
d329e058
AJ
83static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84static void cfg_layout_delete_block (basic_block);
85static void rtl_delete_block (basic_block);
86static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
6de9cd9a 87static edge rtl_redirect_edge_and_branch (edge, basic_block);
f470c378 88static basic_block rtl_split_block (basic_block, void *);
1a817418 89static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
d329e058 90static int rtl_verify_flow_info_1 (void);
f470c378 91static void rtl_make_forwarder_block (edge);
ca6c03ca
JH
92\f
93/* Return true if NOTE is not one of the ones that must be kept paired,
5f0d2358 94 so that we may simply delete it. */
ca6c03ca
JH
95
96static int
8879d71c 97can_delete_note_p (const rtx_note *note)
ca6c03ca 98{
cd9c1ca8
RH
99 switch (NOTE_KIND (note))
100 {
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
105
106 default:
107 return false;
108 }
ca6c03ca
JH
109}
110
111/* True if a given label can be deleted. */
112
113static int
8879d71c 114can_delete_label_p (const rtx_code_label *label)
ca6c03ca 115{
5f0d2358
RK
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
6f7eba34
TS
119 && !vec_safe_contains<rtx_insn *> (forced_labels,
120 const_cast<rtx_code_label *> (label)));
ca6c03ca
JH
121}
122
/* Delete INSN by patching it out.

   Labels that are still referenced (constant pool, computed goto, user
   labels) are not physically removed; they are turned into
   NOTE_INSN_DELETED_LABEL notes instead.  LABEL_NUSES reference counts
   of any labels mentioned by INSN are kept up to date.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  /* Patch the label in place into a note; keep the user name.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.
	     This keeps the deleted-label note outside the block proper.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
218
98ccd1d7
IE
219/* Like delete_insn but also purge dead edges from BB.
220 Return true if any edges are eliminated. */
9ed7b221 221
98ccd1d7 222bool
373fc786 223delete_insn_and_edges (rtx_insn *insn)
3dec4024 224{
3dec4024
JH
225 bool purge = false;
226
ba4f7968 227 if (INSN_P (insn)
3dec4024 228 && BLOCK_FOR_INSN (insn)
a813c111 229 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
3dec4024 230 purge = true;
03fbe718 231 delete_insn (insn);
3dec4024 232 if (purge)
98ccd1d7
IE
233 return purge_dead_edges (BLOCK_FOR_INSN (insn));
234 return false;
3dec4024
JH
235}
236
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  Walk backwards from FINISH so that PREV_INSN links are
     read before the current insn is unlinked.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      /* Paired notes must stay in the chain; everything else goes.  */
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      /* Surviving insns (kept notes, preserved labels) lose their bb
	 association when the caller asks for it.  */
      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
264\f
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      /* NOTE(review): this local deliberately shadows the AFTER parameter;
	 here it is the insn the bb note is reordered after, not a block.  */
      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain: the note is the block.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes right after the label heading the block.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the bb note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
345
/* Create new basic block consisting of instructions in between HEAD and END
   and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create basic block at the end of INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
      /* Grow by about 25% to amortize repeated reallocations.  */
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  /* create_basic_block_structure uses AUX as a "seen" tag; clear it.  */
  bb->aux = NULL;
  return bb;
}
bc35512f
JH
374
375static basic_block
376cfg_layout_create_basic_block (void *head, void *end, basic_block after)
377{
378 basic_block newbb = rtl_create_basic_block (head, end, after);
379
bc35512f
JH
380 return newbb;
381}
ca6c03ca
JH
382\f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  /* get_last_bb_insn includes any trailing barrier/table in the chain.  */
  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
412\f
852c6ec7 413/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
ca6c03ca
JH
414
415void
d329e058 416compute_bb_for_insn (void)
ca6c03ca 417{
e0082a72 418 basic_block bb;
ca6c03ca 419
11cd3bed 420 FOR_EACH_BB_FN (bb, cfun)
ca6c03ca 421 {
8879d71c
DM
422 rtx_insn *end = BB_END (bb);
423 rtx_insn *insn;
ca6c03ca 424
a813c111 425 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
ca6c03ca 426 {
ba4f7968 427 BLOCK_FOR_INSN (insn) = bb;
ca6c03ca
JH
428 if (insn == end)
429 break;
ca6c03ca
JH
430 }
431 }
432}
433
434/* Release the basic_block_for_insn array. */
435
c2924966 436unsigned int
d329e058 437free_bb_for_insn (void)
ca6c03ca 438{
8879d71c 439 rtx_insn *insn;
ba4f7968 440 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 441 if (!BARRIER_P (insn))
ba4f7968 442 BLOCK_FOR_INSN (insn) = NULL;
c2924966 443 return 0;
ca6c03ca
JH
444}
445
namespace {

/* Metadata for the *free_cfg pass: an RTL pass that tears down the CFG
   (note PROP_cfg in properties_destroyed) once it is no longer needed.  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  /* Record hot/cold section boundaries before block structure is lost.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
498
91278841 499/* Return RTX to emit after when we want to emit code on the entry of function. */
8a2e8325 500rtx_insn *
91278841
AP
501entry_of_function (void)
502{
0cae8d31 503 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
fefa31b5 504 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
91278841
AP
505}
506
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  /* The entry block has a single fallthru successor; inserting on that
     edge guarantees exactly-once execution.  */
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
519
6c3d0e31 520/* Update BLOCK_FOR_INSN of insns between BEGIN and END
b8698a0f 521 (or BARRIER if found) and notify df of the bb change.
6c3d0e31
SP
522 The insn chain range is inclusive
523 (i.e. both BEGIN and END will be updated. */
ca6c03ca 524
6c3d0e31 525static void
8879d71c 526update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
ca6c03ca 527{
8879d71c 528 rtx_insn *insn;
ca6c03ca 529
6c3d0e31
SP
530 end = NEXT_INSN (end);
531 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
63642d5a
AO
532 if (!BARRIER_P (insn))
533 df_insn_change_bb (insn, bb);
ca6c03ca 534}
6c3d0e31
SP
535
536/* Update BLOCK_FOR_INSN of insns in BB to BB,
537 and notify df of the change. */
538
539void
540update_bb_for_insn (basic_block bb)
541{
542 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
543}
544
532aafad
SB
545\f
546/* Like active_insn_p, except keep the return value clobber around
547 even after reload. */
548
549static bool
8879d71c 550flow_active_insn_p (const rtx_insn *insn)
532aafad
SB
551{
552 if (active_insn_p (insn))
553 return true;
554
555 /* A clobber of the function return value exists for buggy
556 programs that fail to return a value. Its effect is to
557 keep the return value from being live across the entire
558 function. If we allow it to be skipped, we introduce the
559 possibility for register lifetime confusion. */
560 if (GET_CODE (PATTERN (insn)) == CLOBBER
561 && REG_P (XEXP (PATTERN (insn), 0))
562 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
563 return true;
564
565 return false;
566}
567
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  /* Entry/exit blocks, blocks with several successors, or blocks whose
     single successor edge is fake can never be plain forwarders.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  /* No insn before the last one may have an effect.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* The last insn may only be a simple unconditional jump, a non-insn,
     or something with no effect.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
590
591/* Likewise, but protect loop latches, headers and preheaders. */
592/* FIXME: Make this a cfg hook. */
593
594bool
595forwarder_block_p (const_basic_block bb)
596{
597 if (!contains_no_active_insn_p (bb))
598 return false;
599
532aafad
SB
600 /* Protect loop latches, headers and preheaders. */
601 if (current_loops)
602 {
603 basic_block dest;
604 if (bb->loop_father->header == bb)
605 return false;
606 dest = EDGE_SUCC (bb, 0)->dest;
607 if (dest->loop_father->header == dest)
608 return false;
609 }
610
2407343c 611 return true;
532aafad
SB
612}
613
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  /* Anything falls through into the exit block.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fallthru requires the blocks to be physically adjacent.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An already-existing fallthru edge to the exit block would be lost.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* True iff no active insn (e.g. a barrier) lies between the blocks.  */
  return next_active_insn (insn) == insn2;
}
645
646/* Return nonzero if we could reach target from src by falling through,
647 if the target was made adjacent. If we already have a fall-through
648 edge to the exit block, we can't do that. */
649static bool
650could_fall_through (basic_block src, basic_block target)
651{
652 edge e;
653 edge_iterator ei;
654
fefa31b5 655 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
532aafad
SB
656 return true;
657 FOR_EACH_EDGE (e, ei, src->succs)
fefa31b5 658 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
532aafad
SB
659 && e->flags & EDGE_FALLTHRU)
660 return 0;
661 return true;
662}
ca6c03ca 663\f
ef2be249 664/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
ed270997 665rtx_note *
ef2be249
RS
666bb_note (basic_block bb)
667{
8879d71c 668 rtx_insn *note;
ef2be249
RS
669
670 note = BB_HEAD (bb);
671 if (LABEL_P (note))
672 note = NEXT_INSN (note);
673
674 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
ed270997 675 return as_a <rtx_note *> (note);
ef2be249
RS
676}
677
6fb5fa3c
DB
678/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
679 note associated with the BLOCK. */
680
8879d71c 681static rtx_insn *
6fb5fa3c
DB
682first_insn_after_basic_block_note (basic_block block)
683{
8879d71c 684 rtx_insn *insn;
6fb5fa3c
DB
685
686 /* Get the first instruction in the block. */
687 insn = BB_HEAD (block);
688
689 if (insn == NULL_RTX)
8879d71c 690 return NULL;
6fb5fa3c
DB
691 if (LABEL_P (insn))
692 insn = NEXT_INSN (insn);
693 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
694
695 return NEXT_INSN (insn);
696}
697
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  /* A NULL INSNP means "split right after the basic block note".  */
  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
759
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward: if the last located insn already
     carries the edge's locus, the edge is not the unique holder.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward for the same locus on its first
     non-debug insn.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
796
797/* If the single edge between blocks A and B is the only place in RTL which
798 holds some unique locus, emit a nop with that locus between the blocks. */
799
800static void
801emit_nop_for_unique_locus_between (basic_block a, basic_block b)
802{
803 if (!unique_locus_on_edge_between_p (a, b))
804 return;
805
1130d5e3 806 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5368224f 807 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
9be94227
EB
808}
809
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Trailing debug insns of B are handled separately, so that they end
     up after A's real insns; track the range [b_debug_start, b_debug_end].  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      /* Walk back past notes to find what precedes the jump.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
bc35512f 929
6fb5fa3c 930
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
ca6c03ca 969\f
5f0d2358
RK
970/* Return the label in the head of basic block BLOCK. Create one if it doesn't
971 exist. */
ca6c03ca 972
1476d1bd 973rtx_code_label *
d329e058 974block_label (basic_block block)
ca6c03ca 975{
fefa31b5 976 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1476d1bd 977 return NULL;
5f0d2358 978
4b4bf941 979 if (!LABEL_P (BB_HEAD (block)))
ca6c03ca 980 {
1130d5e3 981 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
ca6c03ca 982 }
5f0d2358 983
1476d1bd 984 return as_a <rtx_code_label *> (BB_HEAD (block));
ca6c03ca
JH
985}
986
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* The block must end in a jump with no other effects.  */
  if (!onlyjump_p (insn))
    return NULL;
  /* Tablejumps cannot be rewritten once optimization is off or
     register allocation has completed.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx_insn *insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  /* Unlink the barrier from the footer chain by hand.  */
		  if (PREV_INSN (insn))
		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* Ensure the new jump is followed by a barrier, reusing an
	 existing one where possible.  */
      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Splice NEW_INSN out of its current position ...  */
	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ... and relink it immediately before BARRIER.  */
	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The remaining edge is now unconditional.  */
  e->probability = profile_probability::always ();
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1165
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      /* block_label returned NULL above; cannot retarget into the exit
	 block.  */
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      /* Rewrite every table entry that pointed at OLD_LABEL.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* asm goto: rewrite each label operand that matches OLD_LABEL.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* NEW_LABEL is now the primary target; a REG_LABEL_TARGET
	     note for it would be redundant.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Redirect any REG_LABEL_OPERAND notes as well.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1279
1280
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      /* A single jump ends the block; patch just that insn.  */
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Finally update the CFG edge itself.  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1319
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  /* Edges to/from the entry and exit blocks never cross a hot/cold
     partition boundary.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src))
	  && !CROSSING_JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}
1363
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}
1400
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  /* Abnormal-call and EH edges are not represented by a jump insn and
     cannot be redirected here.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  /* First try the cheap transformation: replace or remove the jump.  */
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  /* Otherwise patch the jump insn in place.  */
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
1440
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      /* In cfglayout mode barriers live in the footer chain, not the
	 main insn stream; move the new barrier there.  */
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  /* Append at the end of the footer, unless it already ends
	     with a barrier.  */
	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
1469
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
      gcc_assert (redirected);

      /* Split the profile data between the redirected branch edge B
	 and the remaining (fallthru) edge E.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = profile_probability::from_reg_br_prob_note (prob);
	  b->count = e->count.apply_probability (b->probability);
	  e->probability -= e->probability;
	  e->count -= b->count;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      /* Retarget the asm label reference and keep the label
		 use counts consistent.  */
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count;
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the profile evenly between the two outgoing edges.  */
	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
	  new_edge->count = new_edge->count.apply_scale (1, 2);
	  jump_block->count = jump_block->count.apply_scale (1, 2);
	  jump_block->frequency /= 2;
	  edge new_edge2 = make_edge (new_edge->src, target,
				      e->flags & ~EDGE_FALLTHRU);
	  new_edge2->probability = probability - new_edge->probability;
	  new_edge2->count = count - new_edge->count;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Jumping to the exit block means emitting a return of the kind
	 requested by JUMP_LABEL (ret_rtx or simple_return_rtx).  */
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1708
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  /* No redirection requested, so keep the current destination and pass
     NULL_RTX as the return-jump kind.  */
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
1718
1719/* Redirect edge even at the expense of creating new jump insn or
1720 basic block. Return new basic block if created, NULL otherwise.
41806d92 1721 Conversion must be possible. */
ca6c03ca 1722
9ee634e3 1723static basic_block
d329e058 1724rtl_redirect_edge_and_branch_force (edge e, basic_block target)
ca6c03ca 1725{
5f0d2358
RK
1726 if (redirect_edge_and_branch (e, target)
1727 || e->dest == target)
ca6c03ca
JH
1728 return NULL;
1729
1730 /* In case the edge redirection failed, try to force it to be non-fallthru
1731 and redirect newly created simplejump. */
6fb5fa3c 1732 df_set_bb_dirty (e->src);
26898771 1733 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
ca6c03ca
JH
1734}
1735
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

      /* Step back once more so the jump itself is included in the
	 chain deleted below.  */
      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
ca6c03ca 1803\f
/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  /* Not implemented for plain RTL mode; returning false tells the
     generic cfghooks machinery the move could not be performed.  */
  return false;
}
1812
/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  /* Walk the bb chain forward from START_BB; the first block whose
     chain successor is in a different partition is the last block of
     START_BB's partition.  */
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* The loop ran off the end of the chain (BB is now the exit block).
     Return bb before the exit block.  */
  return bb->prev_bb;
}
1827
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  BEFORE is the insn the new block's
     instructions will be inserted in front of (NULL means "at the
     end of the insn chain").  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below). Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = get_last_bb_insn (after);
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot. The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru returns a new block only when it had to split
	 further; here that must not happen.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && (extract_asm_operands (PATTERN (last))
		  || JUMP_LABEL (last) == before)
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1943
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  /* Open a fresh sequence, or reopen the sequence already pending on E
     so that PATTERN is appended after previously queued insns.  */
  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  /* Stash the accumulated sequence back on the edge.  */
  e->insns.r = get_insns ();
  end_sequence ();
}
1965
/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    /* Queued insns must not end the block with a jump other than the
       return handled above.  */
    gcc_assert (!JUMP_P (last));
}
2074
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks. This will cause the verification below to fail,
     and lead to now cold code in the hot section. In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  checking_verify_flow_info ();

  /* Iterate including the entry block, since edges out of the entry
     block can carry queued insns too.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}
2103\f
6fb5fa3c 2104
/* Print out RTL-specific basic block information (live information
   at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
   documented in dumpfile.h.  */

static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
{
  rtx_insn *insn;
  rtx_insn *last;
  char *s_indent;

  /* NOTE(review): s_indent is built but never printed in this function —
     looks like a leftover from an older dump format; confirm before
     removing.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  /* The artificial entry/exit blocks contain no insns to print.  */
  if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
    for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
	 insn = NEXT_INSN (insn))
      {
	if (flags & TDF_DETAILS)
	  df_dump_insn_top (insn, outf);
	if (! (flags & TDF_SLIM))
	  print_rtl_single (outf, insn);
	else
	  dump_insn_slim (outf, insn);
	if (flags & TDF_DETAILS)
	  df_dump_insn_bottom (insn, outf);
      }

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }

}
2147\f
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: block starting/ending at the insn, and how
	 many blocks claim the insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  /* NOTE(review): passes the global dump_flags rather than
		     the FLAGS parameter — presumably intentional so bb
		     headers honor the pass's dump settings; confirm.  */
		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2245\f
/* Update the branch probability of BB if a REG_BR_PROB is present.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
  /* Nothing to do unless BB ends in a jump with a known branch
     probability on its taken edge.  */
  if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
    return;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  /* Leave the insn untouched when there is no note or it already
     matches the cfg probability.  */
  if (!note
      || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
    return;
  XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
}
96370780
MK
2260
2261/* Get the last insn associated with block BB (that includes barriers and
2262 tablejumps after BB). */
636eb204 2263rtx_insn *
96370780
MK
2264get_last_bb_insn (basic_block bb)
2265{
8942ee0f 2266 rtx_jump_table_data *table;
636eb204
DM
2267 rtx_insn *tmp;
2268 rtx_insn *end = BB_END (bb);
96370780
MK
2269
2270 /* Include any jump table following the basic block. */
8942ee0f
DM
2271 if (tablejump_p (end, NULL, &table))
2272 end = table;
96370780
MK
2273
2274 /* Include any barriers that may follow the basic block. */
1e211590 2275 tmp = next_nonnote_insn_bb (end);
96370780
MK
2276 while (tmp && BARRIER_P (tmp))
2277 {
2278 end = tmp;
1e211590 2279 tmp = next_nonnote_insn_bb (end);
96370780
MK
2280 }
2281
2282 return end;
2283}
af205f67 2284
d9af4fea
JH
2285/* Add all BBs reachable from entry via hot paths into the SET. */
2286
2287void
2288find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2289{
2290 auto_vec<basic_block, 64> worklist;
2291
2292 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2293 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2294
2295 while (worklist.length () > 0)
2296 {
2297 basic_block bb = worklist.pop ();
2298 edge_iterator ei;
2299 edge e;
2300
2301 FOR_EACH_EDGE (e, ei, bb->succs)
2302 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2303 && !set->add (e->dest))
2304 worklist.safe_push (e->dest);
2305 }
2306}
2307
600b5b1d
TJ
2308/* Sanity check partition hotness to ensure that basic blocks in
2309   the cold partition don't dominate basic blocks in the hot partition.
2310 If FLAG_ONLY is true, report violations as errors. Otherwise
2311 re-mark the dominated blocks as cold, since this is run after
2312 cfg optimizations that may make hot blocks previously reached
2313 by both hot and cold blocks now only reachable along cold paths. */
2314
2315static vec<basic_block>
2316find_partition_fixes (bool flag_only)
2317{
2318 basic_block bb;
2319 vec<basic_block> bbs_in_cold_partition = vNULL;
2320 vec<basic_block> bbs_to_fix = vNULL;
d9af4fea 2321 hash_set<basic_block> set;
600b5b1d
TJ
2322
2323 /* Callers check this. */
2324 gcc_checking_assert (crtl->has_bb_partition);
2325
d9af4fea 2326 find_bbs_reachable_by_hot_paths (&set);
600b5b1d 2327
d9af4fea
JH
2328 FOR_EACH_BB_FN (bb, cfun)
2329 if (!set.contains (bb)
2330 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2331 {
2332 if (flag_only)
2333 error ("non-cold basic block %d reachable only "
2334 "by paths crossing the cold partition", bb->index);
2335 else
2336 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2337 bbs_to_fix.safe_push (bb);
2338 bbs_in_cold_partition.safe_push (bb);
2339 }
600b5b1d
TJ
2340
2341 return bbs_to_fix;
2342}
2343
2344/* Perform cleanup on the hot/cold bb partitioning after optimization
2345 passes that modify the cfg. */
2346
2347void
2348fixup_partitions (void)
2349{
2350 basic_block bb;
2351
2352 if (!crtl->has_bb_partition)
2353 return;
2354
2355 /* Delete any blocks that became unreachable and weren't
2356 already cleaned up, for example during edge forwarding
2357 and convert_jumps_to_returns. This will expose more
2358 opportunities for fixing the partition boundaries here.
2359 Also, the calculation of the dominance graph during verification
2360 will assert if there are unreachable nodes. */
2361 delete_unreachable_blocks ();
2362
2363 /* If there are partitions, do a sanity check on them: A basic block in
2364   a cold partition cannot dominate a basic block in a hot partition.
2365 Fixup any that now violate this requirement, as a result of edge
2366 forwarding and unreachable block deletion.  */
2367 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2368
2369 /* Do the partition fixup after all necessary blocks have been converted to
2370 cold, so that we only update the region crossings the minimum number of
2371 places, which can require forcing edges to be non fallthru. */
2372 while (! bbs_to_fix.is_empty ())
2373 {
2374 bb = bbs_to_fix.pop ();
2375 fixup_new_cold_bb (bb);
2376 }
2377}
2378
af205f67
TJ
2379/* Verify, in the basic block chain, that there is at most one switch
2380 between hot/cold partitions. This condition will not be true until
2381 after reorder_basic_blocks is called. */
2382
251a41b9 2383static int
af205f67
TJ
2384verify_hot_cold_block_grouping (void)
2385{
2386 basic_block bb;
2387 int err = 0;
2388 bool switched_sections = false;
2389 int current_partition = BB_UNPARTITIONED;
2390
3371a64f
TJ
2391 /* Even after bb reordering is complete, we go into cfglayout mode
2392 again (in compgoto). Ensure we don't call this before going back
2393 into linearized RTL when any layout fixes would have been committed. */
2394 if (!crtl->bb_reorder_complete
c3284718 2395 || current_ir_type () != IR_RTL_CFGRTL)
251a41b9 2396 return err;
af205f67 2397
11cd3bed 2398 FOR_EACH_BB_FN (bb, cfun)
af205f67
TJ
2399 {
2400 if (current_partition != BB_UNPARTITIONED
2401 && BB_PARTITION (bb) != current_partition)
2402 {
2403 if (switched_sections)
2404 {
2405 error ("multiple hot/cold transitions found (bb %i)",
2406 bb->index);
2407 err = 1;
2408 }
2409 else
2410 switched_sections = true;
2411
2412 if (!crtl->has_bb_partition)
2413 error ("partition found but function partition flag not set");
2414 }
2415 current_partition = BB_PARTITION (bb);
2416 }
2417
251a41b9 2418 return err;
af205f67 2419}
b446e5a2 2420\f
ca6c03ca 2421
251a41b9
TJ
2422/* Perform several checks on the edges out of each block, such as
2423 the consistency of the branch probabilities, the correctness
2424 of hot/cold partition crossing edges, and the number of expected
600b5b1d
TJ
2425 successor edges. Also verify that the dominance relationship
2426 between hot/cold blocks is sane. */
f470c378 2427
10e9fecc 2428static int
251a41b9 2429rtl_verify_edges (void)
ca6c03ca 2430{
10e9fecc 2431 int err = 0;
94eb5ddb 2432 basic_block bb;
ca6c03ca 2433
4f42035e 2434 FOR_EACH_BB_REVERSE_FN (bb, cfun)
ca6c03ca 2435 {
f99ffaa3
EB
2436 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2437 int n_eh = 0, n_abnormal = 0;
3cf54412 2438 edge e, fallthru = NULL;
628f6a4e 2439 edge_iterator ei;
251a41b9 2440 rtx note;
3371a64f 2441 bool has_crossing_edge = false;
ca6c03ca 2442
2085a21f 2443 if (JUMP_P (BB_END (bb))
a813c111 2444 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
628f6a4e 2445 && EDGE_COUNT (bb->succs) >= 2
a813c111 2446 && any_condjump_p (BB_END (bb)))
5a1a3e5e 2447 {
357067f2
JH
2448 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2449 {
a4c57f86 2450 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
357067f2 2451 {
a4c57f86
JH
2452 error ("verify_flow_info: "
2453 "REG_BR_PROB is set but cfg probability is not");
2454 err = 1;
357067f2
JH
2455 }
2456 }
2457 else if (XINT (note, 0)
5fa396ad 2458 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
357067f2 2459 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
5a1a3e5e 2460 {
e5af9ddd 2461 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
357067f2 2462 XINT (note, 0),
5fa396ad 2463 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
5a1a3e5e
JH
2464 err = 1;
2465 }
2466 }
251a41b9 2467
628f6a4e 2468 FOR_EACH_EDGE (e, ei, bb->succs)
ca6c03ca 2469 {
0be7e7a6
RH
2470 bool is_crossing;
2471
ca6c03ca 2472 if (e->flags & EDGE_FALLTHRU)
0be7e7a6
RH
2473 n_fallthru++, fallthru = e;
2474
2475 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
fefa31b5
DM
2476 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2477 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
3371a64f 2478 has_crossing_edge |= is_crossing;
0be7e7a6 2479 if (e->flags & EDGE_CROSSING)
750054a2 2480 {
0be7e7a6
RH
2481 if (!is_crossing)
2482 {
2483 error ("EDGE_CROSSING incorrectly set across same section");
2484 err = 1;
2485 }
2486 if (e->flags & EDGE_FALLTHRU)
2487 {
f99ffaa3 2488 error ("fallthru edge crosses section boundary in bb %i",
750054a2
CT
2489 e->src->index);
2490 err = 1;
2491 }
0be7e7a6
RH
2492 if (e->flags & EDGE_EH)
2493 {
f99ffaa3 2494 error ("EH edge crosses section boundary in bb %i",
0be7e7a6
RH
2495 e->src->index);
2496 err = 1;
2497 }
339ba33b 2498 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
3371a64f
TJ
2499 {
2500 error ("No region crossing jump at section boundary in bb %i",
2501 bb->index);
2502 err = 1;
2503 }
0be7e7a6
RH
2504 }
2505 else if (is_crossing)
2506 {
2507 error ("EDGE_CROSSING missing across section boundary");
2508 err = 1;
750054a2 2509 }
3dec4024 2510
65f43cdf
ZD
2511 if ((e->flags & ~(EDGE_DFS_BACK
2512 | EDGE_CAN_FALLTHRU
2513 | EDGE_IRREDUCIBLE_LOOP
9beb1c84 2514 | EDGE_LOOP_EXIT
7e872b90
JJ
2515 | EDGE_CROSSING
2516 | EDGE_PRESERVE)) == 0)
3dec4024
JH
2517 n_branch++;
2518
2519 if (e->flags & EDGE_ABNORMAL_CALL)
f99ffaa3
EB
2520 n_abnormal_call++;
2521
2522 if (e->flags & EDGE_SIBCALL)
2523 n_sibcall++;
3dec4024
JH
2524
2525 if (e->flags & EDGE_EH)
2526 n_eh++;
f99ffaa3
EB
2527
2528 if (e->flags & EDGE_ABNORMAL)
3dec4024 2529 n_abnormal++;
ca6c03ca 2530 }
5f0d2358 2531
3371a64f 2532 if (!has_crossing_edge
339ba33b
RS
2533 && JUMP_P (BB_END (bb))
2534 && CROSSING_JUMP_P (BB_END (bb)))
3371a64f 2535 {
8264c84d 2536 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
3371a64f
TJ
2537 error ("Region crossing jump across same section in bb %i",
2538 bb->index);
2539 err = 1;
2540 }
2541
1d65f45c 2542 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
3dec4024 2543 {
f99ffaa3 2544 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
3dec4024
JH
2545 err = 1;
2546 }
1d65f45c
RH
2547 if (n_eh > 1)
2548 {
f99ffaa3 2549 error ("too many exception handling edges in bb %i", bb->index);
1d65f45c
RH
2550 err = 1;
2551 }
3dec4024 2552 if (n_branch
4b4bf941 2553 && (!JUMP_P (BB_END (bb))
a813c111
SB
2554 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2555 || any_condjump_p (BB_END (bb))))))
3dec4024 2556 {
ab532386 2557 error ("too many outgoing branch edges from bb %i", bb->index);
3dec4024
JH
2558 err = 1;
2559 }
a813c111 2560 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
3dec4024 2561 {
f99ffaa3 2562 error ("fallthru edge after unconditional jump in bb %i", bb->index);
3dec4024
JH
2563 err = 1;
2564 }
a813c111 2565 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
3dec4024 2566 {
f99ffaa3
EB
2567 error ("wrong number of branch edges after unconditional jump"
2568 " in bb %i", bb->index);
3dec4024
JH
2569 err = 1;
2570 }
a813c111 2571 if (n_branch != 1 && any_condjump_p (BB_END (bb))
c11fd0b2 2572 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
3dec4024 2573 {
f99ffaa3
EB
2574 error ("wrong amount of branch edges after conditional jump"
2575 " in bb %i", bb->index);
2576 err = 1;
2577 }
2578 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2579 {
2580 error ("abnormal call edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2581 err = 1;
2582 }
f99ffaa3 2583 if (n_sibcall && !CALL_P (BB_END (bb)))
3dec4024 2584 {
f99ffaa3 2585 error ("sibcall edges for non-call insn in bb %i", bb->index);
3dec4024
JH
2586 err = 1;
2587 }
f99ffaa3
EB
2588 if (n_abnormal > n_eh
2589 && !(CALL_P (BB_END (bb))
2590 && n_abnormal == n_abnormal_call + n_sibcall)
4b4bf941 2591 && (!JUMP_P (BB_END (bb))
a813c111
SB
2592 || any_condjump_p (BB_END (bb))
2593 || any_uncondjump_p (BB_END (bb))))
3dec4024 2594 {
ab532386 2595 error ("abnormal edges for no purpose in bb %i", bb->index);
3dec4024
JH
2596 err = 1;
2597 }
251a41b9 2598 }
f87c27b4 2599
600b5b1d
TJ
2600 /* If there are partitions, do a sanity check on them: A basic block in
2601   a cold partition cannot dominate a basic block in a hot partition.  */
2602 if (crtl->has_bb_partition && !err)
2603 {
2604 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2605 err = !bbs_to_fix.is_empty ();
2606 }
2607
251a41b9
TJ
2608 /* Clean up. */
2609 return err;
2610}
5f0d2358 2611
/* Checks on the instructions within blocks.  Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A label that is also the block's last insn leaves no room
	     for the mandatory basic block note.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body: no stray block notes, and no control flow
	   insn before the block's last insn.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
2675
/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      /* Every insn inside the block must point back at it.  */
      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header/footer insns (cfglayout mode) are outside any block, so
	 except for barriers their block pointer must be NULL.  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}
5f0d2358 2727
10e9fecc
JH
2728/* Verify the CFG and RTL consistency common for both underlying RTL and
2729 cfglayout RTL.
5f0d2358 2730
10e9fecc 2731 Currently it does following checks:
251a41b9
TJ
2732
2733 - overlapping of basic blocks
2734 - insns with wrong BLOCK_FOR_INSN pointers
2735 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2736 - tails of basic blocks (ensure that boundary is necessary)
2737 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2738 and NOTE_INSN_BASIC_BLOCK
2739 - verify that no fall_thru edge crosses hot/cold partition boundaries
2740 - verify that there are no pending RTL branch predictions
600b5b1d 2741 - verify that hot blocks are not dominated by cold blocks
251a41b9
TJ
2742
2743 In future it can be extended check a lot of other stuff as well
2744 (reachability of basic blocks, life information, etc. etc.). */
9eab6785 2745
10e9fecc 2746static int
251a41b9
TJ
2747rtl_verify_flow_info_1 (void)
2748{
2749 int err = 0;
2750
2751 err |= rtl_verify_bb_pointers ();
2752
2753 err |= rtl_verify_bb_insns ();
2754
2755 err |= rtl_verify_edges ();
2756
251a41b9
TJ
2757 return err;
2758}
2759
2760/* Walk the instruction chain and verify that bb head/end pointers
2761 are correct, and that instructions are in exactly one bb and have
2762 correct block pointers. */
2763
2764static int
2765rtl_verify_bb_insn_chain (void)
10e9fecc
JH
2766{
2767 basic_block bb;
251a41b9 2768 int err = 0;
8879d71c
DM
2769 rtx_insn *x;
2770 rtx_insn *last_head = get_last_insn ();
9eab6785 2771 basic_block *bb_info;
9eab6785
SB
2772 const int max_uid = get_max_uid ();
2773
2774 bb_info = XCNEWVEC (basic_block, max_uid);
ca6c03ca 2775
4f42035e 2776 FOR_EACH_BB_REVERSE_FN (bb, cfun)
10e9fecc 2777 {
8879d71c
DM
2778 rtx_insn *head = BB_HEAD (bb);
2779 rtx_insn *end = BB_END (bb);
628f6a4e 2780
9eab6785 2781 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
a7b87f73
ZD
2782 {
2783 /* Verify the end of the basic block is in the INSN chain. */
2784 if (x == end)
2785 break;
2786
251a41b9
TJ
2787 /* And that the code outside of basic blocks has NULL bb field. */
2788 if (!BARRIER_P (x)
2789 && BLOCK_FOR_INSN (x) != NULL)
2790 {
2791 error ("insn %d outside of basic blocks has non-NULL bb field",
2792 INSN_UID (x));
2793 err = 1;
2794 }
a7b87f73 2795 }
9eab6785
SB
2796
2797 if (!x)
2798 {
2799 error ("end insn %d for block %d not found in the insn stream",
2800 INSN_UID (end), bb->index);
2801 err = 1;
2802 }
2803
2804 /* Work backwards from the end to the head of the basic block
2805 to verify the head is in the RTL chain. */
2806 for (; x != NULL_RTX; x = PREV_INSN (x))
a00d11f0 2807 {
9eab6785
SB
2808 /* While walking over the insn chain, verify insns appear
2809 in only one basic block. */
2810 if (bb_info[INSN_UID (x)] != NULL)
2811 {
2812 error ("insn %d is in multiple basic blocks (%d and %d)",
2813 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2814 err = 1;
2815 }
2816
2817 bb_info[INSN_UID (x)] = bb;
2818
2819 if (x == head)
2820 break;
2821 }
2822 if (!x)
2823 {
2824 error ("head insn %d for block %d not found in the insn stream",
2825 INSN_UID (head), bb->index);
a00d11f0
JH
2826 err = 1;
2827 }
2828
a7b87f73 2829 last_head = PREV_INSN (x);
251a41b9
TJ
2830 }
2831
2832 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2833 {
2834 /* Check that the code before the first basic block has NULL
2835 bb field. */
2836 if (!BARRIER_P (x)
2837 && BLOCK_FOR_INSN (x) != NULL)
2838 {
2839 error ("insn %d outside of basic blocks has non-NULL bb field",
2840 INSN_UID (x));
2841 err = 1;
2842 }
2843 }
2844 free (bb_info);
2845
2846 return err;
2847}
2848
2849/* Verify that fallthru edges point to adjacent blocks in layout order and
2850 that barriers exist after non-fallthru blocks. */
2851
2852static int
2853rtl_verify_fallthru (void)
2854{
2855 basic_block bb;
2856 int err = 0;
2857
4f42035e 2858 FOR_EACH_BB_REVERSE_FN (bb, cfun)
251a41b9
TJ
2859 {
2860 edge e;
9eab6785 2861
0fd4b31d 2862 e = find_fallthru_edge (bb->succs);
10e9fecc
JH
2863 if (!e)
2864 {
8879d71c 2865 rtx_insn *insn;
10e9fecc
JH
2866
2867 /* Ensure existence of barrier in BB with no fallthru edges. */
468059bc
DD
2868 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2869 {
2870 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
10e9fecc
JH
2871 {
2872 error ("missing barrier after block %i", bb->index);
2873 err = 1;
2874 break;
2875 }
468059bc
DD
2876 if (BARRIER_P (insn))
2877 break;
2878 }
10e9fecc 2879 }
fefa31b5
DM
2880 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2881 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
c22cacf3 2882 {
8879d71c 2883 rtx_insn *insn;
10e9fecc
JH
2884
2885 if (e->src->next_bb != e->dest)
2886 {
2887 error
2888 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2889 e->src->index, e->dest->index);
2890 err = 1;
2891 }
2892 else
a813c111 2893 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
10e9fecc 2894 insn = NEXT_INSN (insn))
6be85b25 2895 if (BARRIER_P (insn) || INSN_P (insn))
10e9fecc
JH
2896 {
2897 error ("verify_flow_info: Incorrect fallthru %i->%i",
2898 e->src->index, e->dest->index);
2899 fatal_insn ("wrong insn in the fallthru edge", insn);
2900 err = 1;
2901 }
c22cacf3 2902 }
10e9fecc 2903 }
ca6c03ca 2904
251a41b9
TJ
2905 return err;
2906}
2907
2908/* Verify that blocks are laid out in consecutive order. While walking the
2909 instructions, verify that all expected instructions are inside the basic
2910 blocks, and that all returns are followed by barriers. */
2911
2912static int
2913rtl_verify_bb_layout (void)
2914{
2915 basic_block bb;
2916 int err = 0;
8879d71c 2917 rtx_insn *x;
251a41b9 2918 int num_bb_notes;
8879d71c 2919 rtx_insn * const rtx_first = get_insns ();
fefa31b5 2920 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
9eab6785 2921
ca6c03ca 2922 num_bb_notes = 0;
fefa31b5 2923 last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
e0082a72 2924
5f0d2358 2925 for (x = rtx_first; x; x = NEXT_INSN (x))
ca6c03ca
JH
2926 {
2927 if (NOTE_INSN_BASIC_BLOCK_P (x))
2928 {
bf77398c 2929 bb = NOTE_BASIC_BLOCK (x);
5f0d2358 2930
ca6c03ca 2931 num_bb_notes++;
e0082a72 2932 if (bb != last_bb_seen->next_bb)
10e9fecc 2933 internal_error ("basic blocks not laid down consecutively");
ca6c03ca 2934
10e9fecc 2935 curr_bb = last_bb_seen = bb;
ca6c03ca
JH
2936 }
2937
10e9fecc 2938 if (!curr_bb)
ca6c03ca
JH
2939 {
2940 switch (GET_CODE (x))
2941 {
2942 case BARRIER:
2943 case NOTE:
2944 break;
2945
2946 case CODE_LABEL:
39718607 2947 /* An ADDR_VEC is placed outside any basic block. */
ca6c03ca 2948 if (NEXT_INSN (x)
481683e1 2949 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
5f0d2358 2950 x = NEXT_INSN (x);
ca6c03ca
JH
2951
2952 /* But in any case, non-deletable labels can appear anywhere. */
2953 break;
2954
2955 default:
1f978f5f 2956 fatal_insn ("insn outside basic block", x);
ca6c03ca
JH
2957 }
2958 }
2959
26cae194 2960 if (JUMP_P (x)
ca6c03ca 2961 && returnjump_p (x) && ! condjump_p (x)
15eb3a2e 2962 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
1f978f5f 2963 fatal_insn ("return not followed by barrier", x);
251a41b9 2964
a813c111 2965 if (curr_bb && x == BB_END (curr_bb))
10e9fecc 2966 curr_bb = NULL;
ca6c03ca
JH
2967 }
2968
0cae8d31 2969 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
ca6c03ca 2970 internal_error
0b17ab2f 2971 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
0cae8d31 2972 num_bb_notes, n_basic_blocks_for_fn (cfun));
ca6c03ca 2973
10e9fecc 2974 return err;
ca6c03ca 2975}
251a41b9
TJ
2976
2977/* Verify the CFG and RTL consistency common for both underlying RTL and
2978 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2979
2980 Currently it does following checks:
2981 - all checks of rtl_verify_flow_info_1
2982 - test head/end pointers
2983 - check that blocks are laid out in consecutive order
2984 - check that all insns are in the basic blocks
2985 (except the switch handling code, barriers and notes)
2986 - check that all returns are followed by barriers
600b5b1d
TJ
2987 - check that all fallthru edge points to the adjacent blocks
2988 - verify that there is a single hot/cold partition boundary after bbro */
251a41b9
TJ
2989
2990static int
2991rtl_verify_flow_info (void)
2992{
2993 int err = 0;
2994
2995 err |= rtl_verify_flow_info_1 ();
2996
2997 err |= rtl_verify_bb_insn_chain ();
2998
2999 err |= rtl_verify_fallthru ();
3000
3001 err |= rtl_verify_bb_layout ();
3002
3371a64f
TJ
3003 err |= verify_hot_cold_block_grouping ();
3004
251a41b9
TJ
3005 return err;
3006}
ca6c03ca 3007\f
eaec9b3d 3008/* Assume that the preceding pass has possibly eliminated jump instructions
ca6c03ca
JH
3009 or converted the unconditional jumps. Eliminate the edges from CFG.
3010 Return true if any edges are eliminated. */
3011
3012bool
d329e058 3013purge_dead_edges (basic_block bb)
ca6c03ca 3014{
628f6a4e 3015 edge e;
8879d71c
DM
3016 rtx_insn *insn = BB_END (bb);
3017 rtx note;
ca6c03ca 3018 bool purged = false;
628f6a4e
BE
3019 bool found;
3020 edge_iterator ei;
ca6c03ca 3021
b5b8b0ac
AO
3022 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3023 do
3024 insn = PREV_INSN (insn);
3025 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3026
70da1d03 3027 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
4b4bf941 3028 if (NONJUMP_INSN_P (insn)
70da1d03
JH
3029 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3030 {
3031 rtx eqnote;
3032
3033 if (! may_trap_p (PATTERN (insn))
3034 || ((eqnote = find_reg_equal_equiv_note (insn))
3035 && ! may_trap_p (XEXP (eqnote, 0))))
3036 remove_note (insn, note);
3037 }
3038
546c093e 3039 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
628f6a4e 3040 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
546c093e 3041 {
1d65f45c
RH
3042 bool remove = false;
3043
e5f9a909
JW
3044 /* There are three types of edges we need to handle correctly here: EH
3045 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3046 latter can appear when nonlocal gotos are used. */
1d65f45c 3047 if (e->flags & EDGE_ABNORMAL_CALL)
546c093e 3048 {
1d65f45c
RH
3049 if (!CALL_P (insn))
3050 remove = true;
3051 else if (can_nonlocal_goto (insn))
3052 ;
3053 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3054 ;
0a35513e
AH
3055 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3056 ;
1d65f45c
RH
3057 else
3058 remove = true;
546c093e 3059 }
1d65f45c
RH
3060 else if (e->flags & EDGE_EH)
3061 remove = !can_throw_internal (insn);
3062
3063 if (remove)
546c093e 3064 {
1d65f45c
RH
3065 remove_edge (e);
3066 df_set_bb_dirty (bb);
3067 purged = true;
546c093e
RH
3068 }
3069 else
1d65f45c 3070 ei_next (&ei);
546c093e 3071 }
5f0d2358 3072
4b4bf941 3073 if (JUMP_P (insn))
ca6c03ca
JH
3074 {
3075 rtx note;
3076 edge b,f;
628f6a4e 3077 edge_iterator ei;
5f0d2358 3078
ca6c03ca
JH
3079 /* We do care only about conditional jumps and simplejumps. */
3080 if (!any_condjump_p (insn)
3081 && !returnjump_p (insn)
3082 && !simplejump_p (insn))
c51d95ec 3083 return purged;
5f0d2358 3084
5a1a3e5e
JH
3085 /* Branch probability/prediction notes are defined only for
3086 condjumps. We've possibly turned condjump into simplejump. */
3087 if (simplejump_p (insn))
3088 {
3089 note = find_reg_note (insn, REG_BR_PROB, NULL);
3090 if (note)
3091 remove_note (insn, note);
3092 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3093 remove_note (insn, note);
3094 }
3095
628f6a4e 3096 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 3097 {
7fcd7218
JH
3098 /* Avoid abnormal flags to leak from computed jumps turned
3099 into simplejumps. */
f87c27b4 3100
0e1638d4 3101 e->flags &= ~EDGE_ABNORMAL;
7fcd7218 3102
5a566bed
MM
3103 /* See if this edge is one we should keep. */
3104 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3105 /* A conditional jump can fall through into the next
3106 block, so we should keep the edge. */
628f6a4e
BE
3107 {
3108 ei_next (&ei);
3109 continue;
3110 }
fefa31b5 3111 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
a813c111 3112 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
5a566bed
MM
3113 /* If the destination block is the target of the jump,
3114 keep the edge. */
628f6a4e
BE
3115 {
3116 ei_next (&ei);
3117 continue;
3118 }
fefa31b5
DM
3119 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3120 && returnjump_p (insn))
5a566bed
MM
3121 /* If the destination block is the exit block, and this
3122 instruction is a return, then keep the edge. */
628f6a4e
BE
3123 {
3124 ei_next (&ei);
3125 continue;
3126 }
5a566bed
MM
3127 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3128 /* Keep the edges that correspond to exceptions thrown by
0b75beaa
EB
3129 this instruction and rematerialize the EDGE_ABNORMAL
3130 flag we just cleared above. */
3131 {
3132 e->flags |= EDGE_ABNORMAL;
628f6a4e 3133 ei_next (&ei);
0b75beaa
EB
3134 continue;
3135 }
5f0d2358 3136
5a566bed 3137 /* We do not need this edge. */
6fb5fa3c 3138 df_set_bb_dirty (bb);
ca6c03ca
JH
3139 purged = true;
3140 remove_edge (e);
3141 }
5f0d2358 3142
628f6a4e 3143 if (EDGE_COUNT (bb->succs) == 0 || !purged)
c51d95ec 3144 return purged;
5f0d2358 3145
c263766c
RH
3146 if (dump_file)
3147 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
5f0d2358 3148
ca6c03ca
JH
3149 if (!optimize)
3150 return purged;
3151
3152 /* Redistribute probabilities. */
c5cbcccf 3153 if (single_succ_p (bb))
ca6c03ca 3154 {
357067f2 3155 single_succ_edge (bb)->probability = profile_probability::always ();
c5cbcccf 3156 single_succ_edge (bb)->count = bb->count;
f87c27b4 3157 }
ca6c03ca
JH
3158 else
3159 {
3160 note = find_reg_note (insn, REG_BR_PROB, NULL);
3161 if (!note)
3162 return purged;
5f0d2358 3163
ca6c03ca
JH
3164 b = BRANCH_EDGE (bb);
3165 f = FALLTHRU_EDGE (bb);
5fa396ad 3166 b->probability = profile_probability::from_reg_br_prob_note
357067f2 3167 (XINT (note, 0));
5fa396ad 3168 f->probability = b->probability.invert ();
3995f3a2
JH
3169 b->count = bb->count.apply_probability (b->probability);
3170 f->count = bb->count.apply_probability (f->probability);
ca6c03ca 3171 }
5f0d2358 3172
ca6c03ca
JH
3173 return purged;
3174 }
4b4bf941 3175 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
1722c2c8
RH
3176 {
3177 /* First, there should not be any EH or ABCALL edges resulting
3178 from non-local gotos and the like. If there were, we shouldn't
3179 have created the sibcall in the first place. Second, there
3180 should of course never have been a fallthru edge. */
c5cbcccf
ZD
3181 gcc_assert (single_succ_p (bb));
3182 gcc_assert (single_succ_edge (bb)->flags
3183 == (EDGE_SIBCALL | EDGE_ABNORMAL));
1722c2c8
RH
3184
3185 return 0;
3186 }
ca6c03ca 3187
ca6c03ca
JH
3188 /* If we don't see a jump insn, we don't know exactly why the block would
3189 have been broken at this point. Look for a simple, non-fallthru edge,
3190 as these are only created by conditional branches. If we find such an
3191 edge we know that there used to be a jump here and can then safely
3192 remove all non-fallthru edges. */
628f6a4e
BE
3193 found = false;
3194 FOR_EACH_EDGE (e, ei, bb->succs)
3195 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3196 {
3197 found = true;
3198 break;
3199 }
5f0d2358 3200
628f6a4e 3201 if (!found)
ca6c03ca 3202 return purged;
5f0d2358 3203
2afa8dce
DB
3204 /* Remove all but the fake and fallthru edges. The fake edge may be
3205 the only successor for this block in the case of noreturn
3206 calls. */
628f6a4e 3207 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
ca6c03ca 3208 {
2afa8dce 3209 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
c51d95ec 3210 {
6fb5fa3c 3211 df_set_bb_dirty (bb);
c51d95ec
JH
3212 remove_edge (e);
3213 purged = true;
3214 }
628f6a4e
BE
3215 else
3216 ei_next (&ei);
ca6c03ca 3217 }
5f0d2358 3218
c5cbcccf 3219 gcc_assert (single_succ_p (bb));
5f0d2358 3220
357067f2 3221 single_succ_edge (bb)->probability = profile_probability::always ();
c5cbcccf 3222 single_succ_edge (bb)->count = bb->count;
ca6c03ca 3223
c263766c
RH
3224 if (dump_file)
3225 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
0b17ab2f 3226 bb->index);
ca6c03ca
JH
3227 return purged;
3228}
3229
5f0d2358
RK
3230/* Search all basic blocks for potentially dead edges and purge them. Return
3231 true if some edge has been eliminated. */
ca6c03ca
JH
3232
3233bool
25cd19de 3234purge_all_dead_edges (void)
ca6c03ca 3235{
e0082a72 3236 int purged = false;
e0082a72 3237 basic_block bb;
473fb060 3238
11cd3bed 3239 FOR_EACH_BB_FN (bb, cfun)
473fb060 3240 {
e0082a72 3241 bool purged_here = purge_dead_edges (bb);
5f0d2358 3242
473fb060 3243 purged |= purged_here;
473fb060 3244 }
5f0d2358 3245
ca6c03ca
JH
3246 return purged;
3247}
9ee634e3 3248
ba5e9aca
EB
3249/* This is used by a few passes that emit some instructions after abnormal
3250 calls, moving the basic block's end, while they in fact do want to emit
3251 them on the fallthru edge. Look for abnormal call edges, find backward
3252 the call in the block and insert the instructions on the edge instead.
3253
3254 Similarly, handle instructions throwing exceptions internally.
3255
3256 Return true when instructions have been found and inserted on edges. */
3257
3258bool
3259fixup_abnormal_edges (void)
3260{
3261 bool inserted = false;
3262 basic_block bb;
3263
11cd3bed 3264 FOR_EACH_BB_FN (bb, cfun)
ba5e9aca
EB
3265 {
3266 edge e;
3267 edge_iterator ei;
3268
3269 /* Look for cases we are interested in - calls or instructions causing
3270 exceptions. */
3271 FOR_EACH_EDGE (e, ei, bb->succs)
3272 if ((e->flags & EDGE_ABNORMAL_CALL)
3273 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3274 == (EDGE_ABNORMAL | EDGE_EH)))
3275 break;
3276
3277 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3278 {
8879d71c 3279 rtx_insn *insn;
ba5e9aca
EB
3280
3281 /* Get past the new insns generated. Allow notes, as the insns
3282 may be already deleted. */
3283 insn = BB_END (bb);
3284 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3285 && !can_throw_internal (insn)
3286 && insn != BB_HEAD (bb))
3287 insn = PREV_INSN (insn);
3288
3289 if (CALL_P (insn) || can_throw_internal (insn))
3290 {
8879d71c 3291 rtx_insn *stop, *next;
ba5e9aca
EB
3292
3293 e = find_fallthru_edge (bb->succs);
3294
3295 stop = NEXT_INSN (BB_END (bb));
1130d5e3 3296 BB_END (bb) = insn;
ba5e9aca
EB
3297
3298 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3299 {
3300 next = NEXT_INSN (insn);
3301 if (INSN_P (insn))
3302 {
3303 delete_insn (insn);
3304
3305 /* Sometimes there's still the return value USE.
3306 If it's placed after a trapping call (i.e. that
3307 call is the last insn anyway), we have no fallthru
3308 edge. Simply delete this use and don't try to insert
3309 on the non-existent edge. */
3310 if (GET_CODE (PATTERN (insn)) != USE)
3311 {
3312 /* We're not deleting it, we're moving it. */
4654c0cf 3313 insn->set_undeleted ();
0f82e5c9
DM
3314 SET_PREV_INSN (insn) = NULL_RTX;
3315 SET_NEXT_INSN (insn) = NULL_RTX;
ba5e9aca
EB
3316
3317 insert_insn_on_edge (insn, e);
3318 inserted = true;
3319 }
3320 }
3321 else if (!BARRIER_P (insn))
3322 set_block_for_insn (insn, NULL);
3323 }
3324 }
3325
3326 /* It may be that we don't find any trapping insn. In this
3327 case we discovered quite late that the insn that had been
3328 marked as can_throw_internal in fact couldn't trap at all.
3329 So we should in fact delete the EH edges out of the block. */
3330 else
3331 purge_dead_edges (bb);
3332 }
3333 }
3334
3335 return inserted;
3336}
78bde837
SB
3337\f
3338/* Cut the insns from FIRST to LAST out of the insns stream. */
3339
a90c614a 3340rtx_insn *
9152e0aa 3341unlink_insn_chain (rtx_insn *first, rtx_insn *last)
78bde837 3342{
a90c614a
DM
3343 rtx_insn *prevfirst = PREV_INSN (first);
3344 rtx_insn *nextlast = NEXT_INSN (last);
78bde837 3345
0f82e5c9
DM
3346 SET_PREV_INSN (first) = NULL;
3347 SET_NEXT_INSN (last) = NULL;
78bde837 3348 if (prevfirst)
0f82e5c9 3349 SET_NEXT_INSN (prevfirst) = nextlast;
78bde837 3350 if (nextlast)
0f82e5c9 3351 SET_PREV_INSN (nextlast) = prevfirst;
78bde837
SB
3352 else
3353 set_last_insn (prevfirst);
3354 if (!prevfirst)
3355 set_first_insn (nextlast);
9152e0aa 3356 return first;
78bde837
SB
3357}
3358\f
3359/* Skip over inter-block insns occurring after BB which are typically
3360 associated with BB (e.g., barriers). If there are any such insns,
3361 we return the last one. Otherwise, we return the end of BB. */
3362
8879d71c 3363static rtx_insn *
78bde837
SB
3364skip_insns_after_block (basic_block bb)
3365{
8879d71c 3366 rtx_insn *insn, *last_insn, *next_head, *prev;
78bde837 3367
8879d71c 3368 next_head = NULL;
fefa31b5 3369 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3370 next_head = BB_HEAD (bb->next_bb);
3371
3372 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3373 {
3374 if (insn == next_head)
3375 break;
3376
3377 switch (GET_CODE (insn))
3378 {
3379 case BARRIER:
3380 last_insn = insn;
3381 continue;
3382
3383 case NOTE:
3384 switch (NOTE_KIND (insn))
3385 {
3386 case NOTE_INSN_BLOCK_END:
3387 gcc_unreachable ();
3388 continue;
3389 default:
3390 continue;
3391 break;
3392 }
3393 break;
3394
3395 case CODE_LABEL:
3396 if (NEXT_INSN (insn)
3397 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3398 {
3399 insn = NEXT_INSN (insn);
3400 last_insn = insn;
3401 continue;
3402 }
3403 break;
3404
3405 default:
3406 break;
3407 }
3408
3409 break;
3410 }
3411
3412 /* It is possible to hit contradictory sequence. For instance:
3413
3414 jump_insn
3415 NOTE_INSN_BLOCK_BEG
3416 barrier
3417
3418 Where barrier belongs to jump_insn, but the note does not. This can be
3419 created by removing the basic block originally following
3420 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3421
3422 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3423 {
3424 prev = PREV_INSN (insn);
3425 if (NOTE_P (insn))
3426 switch (NOTE_KIND (insn))
3427 {
3428 case NOTE_INSN_BLOCK_END:
3429 gcc_unreachable ();
3430 break;
3431 case NOTE_INSN_DELETED:
3432 case NOTE_INSN_DELETED_LABEL:
3433 case NOTE_INSN_DELETED_DEBUG_LABEL:
3434 continue;
3435 default:
3436 reorder_insns (insn, insn, last_insn);
3437 }
3438 }
3439
3440 return last_insn;
3441}
3442
3443/* Locate or create a label for a given basic block. */
3444
e67d1102 3445static rtx_insn *
78bde837
SB
3446label_for_bb (basic_block bb)
3447{
e67d1102 3448 rtx_insn *label = BB_HEAD (bb);
78bde837
SB
3449
3450 if (!LABEL_P (label))
3451 {
3452 if (dump_file)
3453 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3454
3455 label = block_label (bb);
3456 }
3457
3458 return label;
3459}
3460
3461/* Locate the effective beginning and end of the insn chain for each
3462 block, as defined by skip_insns_after_block above. */
3463
3464static void
3465record_effective_endpoints (void)
3466{
8879d71c 3467 rtx_insn *next_insn;
78bde837 3468 basic_block bb;
8879d71c 3469 rtx_insn *insn;
78bde837
SB
3470
3471 for (insn = get_insns ();
3472 insn
3473 && NOTE_P (insn)
3474 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3475 insn = NEXT_INSN (insn))
3476 continue;
3477 /* No basic blocks at all? */
3478 gcc_assert (insn);
3479
3480 if (PREV_INSN (insn))
3481 cfg_layout_function_header =
3482 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3483 else
9152e0aa 3484 cfg_layout_function_header = NULL;
78bde837
SB
3485
3486 next_insn = get_insns ();
11cd3bed 3487 FOR_EACH_BB_FN (bb, cfun)
78bde837 3488 {
8879d71c 3489 rtx_insn *end;
78bde837
SB
3490
3491 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
1130d5e3 3492 BB_HEADER (bb) = unlink_insn_chain (next_insn,
190bea87 3493 PREV_INSN (BB_HEAD (bb)));
78bde837
SB
3494 end = skip_insns_after_block (bb);
3495 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
d8ce2eae 3496 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
78bde837
SB
3497 next_insn = NEXT_INSN (BB_END (bb));
3498 }
3499
3500 cfg_layout_function_footer = next_insn;
3501 if (cfg_layout_function_footer)
3502 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3503}
3504\f
27a4cd48
DM
3505namespace {
3506
3507const pass_data pass_data_into_cfg_layout_mode =
3508{
3509 RTL_PASS, /* type */
3510 "into_cfglayout", /* name */
3511 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3512 TV_CFG, /* tv_id */
3513 0, /* properties_required */
3514 PROP_cfglayout, /* properties_provided */
3515 0, /* properties_destroyed */
3516 0, /* todo_flags_start */
3517 0, /* todo_flags_finish */
78bde837
SB
3518};
3519
27a4cd48
DM
3520class pass_into_cfg_layout_mode : public rtl_opt_pass
3521{
3522public:
c3284718
RS
3523 pass_into_cfg_layout_mode (gcc::context *ctxt)
3524 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
27a4cd48
DM
3525 {}
3526
3527 /* opt_pass methods: */
be55bfe6
TS
3528 virtual unsigned int execute (function *)
3529 {
3530 cfg_layout_initialize (0);
3531 return 0;
3532 }
27a4cd48
DM
3533
3534}; // class pass_into_cfg_layout_mode
3535
3536} // anon namespace
3537
3538rtl_opt_pass *
3539make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3540{
3541 return new pass_into_cfg_layout_mode (ctxt);
3542}
3543
3544namespace {
3545
3546const pass_data pass_data_outof_cfg_layout_mode =
3547{
3548 RTL_PASS, /* type */
3549 "outof_cfglayout", /* name */
3550 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3551 TV_CFG, /* tv_id */
3552 0, /* properties_required */
3553 0, /* properties_provided */
3554 PROP_cfglayout, /* properties_destroyed */
3555 0, /* todo_flags_start */
3556 0, /* todo_flags_finish */
78bde837 3557};
27a4cd48
DM
3558
3559class pass_outof_cfg_layout_mode : public rtl_opt_pass
3560{
3561public:
c3284718
RS
3562 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3563 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
27a4cd48
DM
3564 {}
3565
3566 /* opt_pass methods: */
be55bfe6 3567 virtual unsigned int execute (function *);
27a4cd48
DM
3568
3569}; // class pass_outof_cfg_layout_mode
3570
be55bfe6
TS
3571unsigned int
3572pass_outof_cfg_layout_mode::execute (function *fun)
3573{
3574 basic_block bb;
3575
3576 FOR_EACH_BB_FN (bb, fun)
3577 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3578 bb->aux = bb->next_bb;
3579
3580 cfg_layout_finalize ();
3581
3582 return 0;
3583}
3584
27a4cd48
DM
3585} // anon namespace
3586
3587rtl_opt_pass *
3588make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3589{
3590 return new pass_outof_cfg_layout_mode (ctxt);
3591}
78bde837
SB
3592\f
3593
3594/* Link the basic blocks in the correct order, compacting the basic
3595 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3596 function also clears the basic block header and footer fields.
3597
3598 This function is usually called after a pass (e.g. tracer) finishes
3599 some transformations while in cfglayout mode. The required sequence
3600 of the basic blocks is in a linked list along the bb->aux field.
3601 This functions re-links the basic block prev_bb and next_bb pointers
532aafad
SB
3602 accordingly, and it compacts and renumbers the blocks.
3603
3604 FIXME: This currently works only for RTL, but the only RTL-specific
3605 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3606 to GIMPLE a long time ago, but it doesn't relink the basic block
3607 chain. It could do that (to give better initial RTL) if this function
3608 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
78bde837
SB
3609
3610void
3611relink_block_chain (bool stay_in_cfglayout_mode)
3612{
3613 basic_block bb, prev_bb;
3614 int index;
3615
3616 /* Maybe dump the re-ordered sequence. */
3617 if (dump_file)
3618 {
3619 fprintf (dump_file, "Reordered sequence:\n");
fefa31b5
DM
3620 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3621 NUM_FIXED_BLOCKS;
78bde837
SB
3622 bb;
3623 bb = (basic_block) bb->aux, index++)
3624 {
3625 fprintf (dump_file, " %i ", index);
3626 if (get_bb_original (bb))
3627 fprintf (dump_file, "duplicate of %i ",
3628 get_bb_original (bb)->index);
3629 else if (forwarder_block_p (bb)
3630 && !LABEL_P (BB_HEAD (bb)))
3631 fprintf (dump_file, "compensation ");
3632 else
3633 fprintf (dump_file, "bb %i ", bb->index);
3634 fprintf (dump_file, " [%i]\n", bb->frequency);
3635 }
3636 }
3637
3638 /* Now reorder the blocks. */
fefa31b5
DM
3639 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3640 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
78bde837
SB
3641 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3642 {
3643 bb->prev_bb = prev_bb;
3644 prev_bb->next_bb = bb;
3645 }
fefa31b5
DM
3646 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3647 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
78bde837
SB
3648
3649 /* Then, clean up the aux fields. */
04a90bec 3650 FOR_ALL_BB_FN (bb, cfun)
78bde837
SB
3651 {
3652 bb->aux = NULL;
3653 if (!stay_in_cfglayout_mode)
1130d5e3 3654 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
78bde837
SB
3655 }
3656
3657 /* Maybe reset the original copy tables, they are not valid anymore
3658 when we renumber the basic blocks in compact_blocks. If we are
3659 are going out of cfglayout mode, don't re-allocate the tables. */
c2e84327
DM
3660 if (original_copy_tables_initialized_p ())
3661 free_original_copy_tables ();
78bde837
SB
3662 if (stay_in_cfglayout_mode)
3663 initialize_original_copy_tables ();
3664
3665 /* Finally, put basic_block_info in the new order. */
3666 compact_blocks ();
3667}
3668\f
3669
3670/* Given a reorder chain, rearrange the code to match. */
3671
3672static void
3673fixup_reorder_chain (void)
3674{
3675 basic_block bb;
9152e0aa 3676 rtx_insn *insn = NULL;
78bde837
SB
3677
3678 if (cfg_layout_function_header)
3679 {
3680 set_first_insn (cfg_layout_function_header);
3681 insn = cfg_layout_function_header;
3682 while (NEXT_INSN (insn))
3683 insn = NEXT_INSN (insn);
3684 }
3685
3686 /* First do the bulk reordering -- rechain the blocks without regard to
3687 the needed changes to jumps and labels. */
3688
fefa31b5
DM
3689 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3690 bb->aux)
78bde837
SB
3691 {
3692 if (BB_HEADER (bb))
3693 {
3694 if (insn)
0f82e5c9 3695 SET_NEXT_INSN (insn) = BB_HEADER (bb);
78bde837
SB
3696 else
3697 set_first_insn (BB_HEADER (bb));
0f82e5c9 3698 SET_PREV_INSN (BB_HEADER (bb)) = insn;
78bde837
SB
3699 insn = BB_HEADER (bb);
3700 while (NEXT_INSN (insn))
3701 insn = NEXT_INSN (insn);
3702 }
3703 if (insn)
0f82e5c9 3704 SET_NEXT_INSN (insn) = BB_HEAD (bb);
78bde837
SB
3705 else
3706 set_first_insn (BB_HEAD (bb));
0f82e5c9 3707 SET_PREV_INSN (BB_HEAD (bb)) = insn;
78bde837
SB
3708 insn = BB_END (bb);
3709 if (BB_FOOTER (bb))
3710 {
0f82e5c9
DM
3711 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3712 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
78bde837
SB
3713 while (NEXT_INSN (insn))
3714 insn = NEXT_INSN (insn);
3715 }
3716 }
3717
0f82e5c9 3718 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
78bde837 3719 if (cfg_layout_function_footer)
0f82e5c9 3720 SET_PREV_INSN (cfg_layout_function_footer) = insn;
78bde837
SB
3721
3722 while (NEXT_INSN (insn))
3723 insn = NEXT_INSN (insn);
3724
3725 set_last_insn (insn);
b2b29377
MM
3726 if (flag_checking)
3727 verify_insn_chain ();
78bde837
SB
3728
3729 /* Now add jumps and labels as needed to match the blocks new
29f3fd5b 3730 outgoing edges. */
78bde837 3731
29f3fd5b
SB
3732 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3733 bb->aux)
78bde837
SB
3734 {
3735 edge e_fall, e_taken, e;
8879d71c 3736 rtx_insn *bb_end_insn;
78bde837 3737 rtx ret_label = NULL_RTX;
3371a64f 3738 basic_block nb;
78bde837
SB
3739 edge_iterator ei;
3740
3741 if (EDGE_COUNT (bb->succs) == 0)
3742 continue;
3743
3744 /* Find the old fallthru edge, and another non-EH edge for
3745 a taken jump. */
3746 e_taken = e_fall = NULL;
3747
3748 FOR_EACH_EDGE (e, ei, bb->succs)
3749 if (e->flags & EDGE_FALLTHRU)
3750 e_fall = e;
3751 else if (! (e->flags & EDGE_EH))
3752 e_taken = e;
3753
3754 bb_end_insn = BB_END (bb);
1476d1bd 3755 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
78bde837 3756 {
1476d1bd
MM
3757 ret_label = JUMP_LABEL (bb_end_jump);
3758 if (any_condjump_p (bb_end_jump))
78bde837
SB
3759 {
3760 /* This might happen if the conditional jump has side
3761 effects and could therefore not be optimized away.
3762 Make the basic block to end with a barrier in order
3763 to prevent rtl_verify_flow_info from complaining. */
3764 if (!e_fall)
3765 {
1476d1bd
MM
3766 gcc_assert (!onlyjump_p (bb_end_jump)
3767 || returnjump_p (bb_end_jump)
91af97c3 3768 || (e_taken->flags & EDGE_CROSSING));
1476d1bd 3769 emit_barrier_after (bb_end_jump);
78bde837
SB
3770 continue;
3771 }
3772
3773 /* If the old fallthru is still next, nothing to do. */
3774 if (bb->aux == e_fall->dest
fefa31b5 3775 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3776 continue;
3777
3778 /* The degenerated case of conditional jump jumping to the next
3779 instruction can happen for jumps with side effects. We need
3780 to construct a forwarder block and this will be done just
3781 fine by force_nonfallthru below. */
3782 if (!e_taken)
3783 ;
3784
3785 /* There is another special case: if *neither* block is next,
3786 such as happens at the very end of a function, then we'll
3787 need to add a new unconditional jump. Choose the taken
3788 edge based on known or assumed probability. */
3789 else if (bb->aux != e_taken->dest)
3790 {
1476d1bd 3791 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
78bde837
SB
3792
3793 if (note
5fa396ad
JH
3794 && profile_probability::from_reg_br_prob_note
3795 (XINT (note, 0)) < profile_probability::even ()
1476d1bd 3796 && invert_jump (bb_end_jump,
fefa31b5
DM
3797 (e_fall->dest
3798 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3799 ? NULL_RTX
3800 : label_for_bb (e_fall->dest)), 0))
3801 {
3802 e_fall->flags &= ~EDGE_FALLTHRU;
3803 gcc_checking_assert (could_fall_through
3804 (e_taken->src, e_taken->dest));
3805 e_taken->flags |= EDGE_FALLTHRU;
3806 update_br_prob_note (bb);
3807 e = e_fall, e_fall = e_taken, e_taken = e;
3808 }
3809 }
3810
3811 /* If the "jumping" edge is a crossing edge, and the fall
3812 through edge is non-crossing, leave things as they are. */
3813 else if ((e_taken->flags & EDGE_CROSSING)
3814 && !(e_fall->flags & EDGE_CROSSING))
3815 continue;
3816
3817 /* Otherwise we can try to invert the jump. This will
3818 basically never fail, however, keep up the pretense. */
1476d1bd 3819 else if (invert_jump (bb_end_jump,
fefa31b5
DM
3820 (e_fall->dest
3821 == EXIT_BLOCK_PTR_FOR_FN (cfun)
78bde837
SB
3822 ? NULL_RTX
3823 : label_for_bb (e_fall->dest)), 0))
3824 {
3825 e_fall->flags &= ~EDGE_FALLTHRU;
3826 gcc_checking_assert (could_fall_through
3827 (e_taken->src, e_taken->dest));
3828 e_taken->flags |= EDGE_FALLTHRU;
3829 update_br_prob_note (bb);
3830 if (LABEL_NUSES (ret_label) == 0
3831 && single_pred_p (e_taken->dest))
a29f6a2b 3832 delete_insn (as_a<rtx_insn *> (ret_label));
78bde837
SB
3833 continue;
3834 }
3835 }
3836 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3837 {
3838 /* If the old fallthru is still next or if
3839 asm goto doesn't have a fallthru (e.g. when followed by
3840 __builtin_unreachable ()), nothing to do. */
3841 if (! e_fall
3842 || bb->aux == e_fall->dest
fefa31b5 3843 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3844 continue;
3845
3846 /* Otherwise we'll have to use the fallthru fixup below. */
3847 }
3848 else
3849 {
3850 /* Otherwise we have some return, switch or computed
3851 jump. In the 99% case, there should not have been a
3852 fallthru edge. */
3853 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3854 continue;
3855 }
3856 }
3857 else
3858 {
3859 /* No fallthru implies a noreturn function with EH edges, or
3860 something similarly bizarre. In any case, we don't need to
3861 do anything. */
3862 if (! e_fall)
3863 continue;
3864
3865 /* If the fallthru block is still next, nothing to do. */
3866 if (bb->aux == e_fall->dest)
3867 continue;
3868
3869 /* A fallthru to exit block. */
fefa31b5 3870 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3871 continue;
3872 }
3873
3874 /* We got here if we need to add a new jump insn.
3875 Note force_nonfallthru can delete E_FALL and thus we have to
3876 save E_FALL->src prior to the call to force_nonfallthru. */
78bde837
SB
3877 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3878 if (nb)
3879 {
3880 nb->aux = bb->aux;
3881 bb->aux = nb;
3882 /* Don't process this new block. */
3883 bb = nb;
78bde837
SB
3884 }
3885 }
3886
3887 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3888
29f3fd5b 3889 /* Annoying special case - jump around dead jumptables left in the code. */
11cd3bed 3890 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3891 {
3892 edge e = find_fallthru_edge (bb->succs);
3893
29f3fd5b
SB
3894 if (e && !can_fallthru (e->src, e->dest))
3895 force_nonfallthru (e);
78bde837
SB
3896 }
3897
3898 /* Ensure goto_locus from edges has some instructions with that locus
3899 in RTL. */
3900 if (!optimize)
11cd3bed 3901 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
3902 {
3903 edge e;
3904 edge_iterator ei;
3905
3906 FOR_EACH_EDGE (e, ei, bb->succs)
2f13f2de 3907 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
5368224f 3908 && !(e->flags & EDGE_ABNORMAL))
78bde837
SB
3909 {
3910 edge e2;
3911 edge_iterator ei2;
3912 basic_block dest, nb;
8879d71c 3913 rtx_insn *end;
78bde837
SB
3914
3915 insn = BB_END (e->src);
3916 end = PREV_INSN (BB_HEAD (e->src));
3917 while (insn != end
5368224f 3918 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
78bde837
SB
3919 insn = PREV_INSN (insn);
3920 if (insn != end
5368224f 3921 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3922 continue;
3923 if (simplejump_p (BB_END (e->src))
5368224f 3924 && !INSN_HAS_LOCATION (BB_END (e->src)))
78bde837 3925 {
5368224f 3926 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
78bde837
SB
3927 continue;
3928 }
3929 dest = e->dest;
fefa31b5 3930 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
78bde837
SB
3931 {
3932 /* Non-fallthru edges to the exit block cannot be split. */
3933 if (!(e->flags & EDGE_FALLTHRU))
3934 continue;
3935 }
3936 else
3937 {
3938 insn = BB_HEAD (dest);
3939 end = NEXT_INSN (BB_END (dest));
3940 while (insn != end && !NONDEBUG_INSN_P (insn))
3941 insn = NEXT_INSN (insn);
5368224f
DC
3942 if (insn != end && INSN_HAS_LOCATION (insn)
3943 && INSN_LOCATION (insn) == e->goto_locus)
78bde837
SB
3944 continue;
3945 }
3946 nb = split_edge (e);
3947 if (!INSN_P (BB_END (nb)))
1130d5e3 3948 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
190bea87 3949 nb);
5368224f 3950 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
78bde837
SB
3951
3952 /* If there are other incoming edges to the destination block
3953 with the same goto locus, redirect them to the new block as
3954 well, this can prevent other such blocks from being created
3955 in subsequent iterations of the loop. */
3956 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
2f13f2de 3957 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
78bde837 3958 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
5368224f 3959 && e->goto_locus == e2->goto_locus)
78bde837
SB
3960 redirect_edge_and_branch (e2, nb);
3961 else
3962 ei_next (&ei2);
3963 }
3964 }
3965}
3966\f
3967/* Perform sanity checks on the insn chain.
3968 1. Check that next/prev pointers are consistent in both the forward and
3969 reverse direction.
3970 2. Count insns in chain, going both directions, and check if equal.
3971 3. Check that get_last_insn () returns the actual end of chain. */
3972
3973DEBUG_FUNCTION void
3974verify_insn_chain (void)
3975{
8879d71c 3976 rtx_insn *x, *prevx, *nextx;
78bde837
SB
3977 int insn_cnt1, insn_cnt2;
3978
3979 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3980 x != 0;
3981 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3982 gcc_assert (PREV_INSN (x) == prevx);
3983
3984 gcc_assert (prevx == get_last_insn ());
3985
3986 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3987 x != 0;
3988 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3989 gcc_assert (NEXT_INSN (x) == nextx);
3990
3991 gcc_assert (insn_cnt1 == insn_cnt2);
3992}
3993\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  Works on the ->aux linearization of the
   block chain used by cfglayout mode.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* BB->aux != NULL means BB is not already last in the ->aux chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  /* The footer travels with the tail half of the split block.  */
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from its current position in the ->aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... walk to the current end of the chain...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and relink BB as the final block before exit.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4038
4039/* In case there are more than one fallthru predecessors of exit, force that
4040 there is only one. */
4041
4042static void
4043force_one_exit_fallthru (void)
4044{
4045 edge e, predecessor = NULL;
4046 bool more = false;
4047 edge_iterator ei;
4048 basic_block forwarder, bb;
4049
fefa31b5 4050 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
78bde837
SB
4051 if (e->flags & EDGE_FALLTHRU)
4052 {
4053 if (predecessor == NULL)
4054 predecessor = e;
4055 else
4056 {
4057 more = true;
4058 break;
4059 }
4060 }
4061
4062 if (!more)
4063 return;
4064
4065 /* Exit has several fallthru predecessors. Create a forwarder block for
4066 them. */
4067 forwarder = split_edge (predecessor);
fefa31b5
DM
4068 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4069 (e = ei_safe_edge (ei)); )
78bde837
SB
4070 {
4071 if (e->src == forwarder
4072 || !(e->flags & EDGE_FALLTHRU))
4073 ei_next (&ei);
4074 else
4075 redirect_edge_and_branch_force (e, forwarder);
4076 }
4077
4078 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4079 exit block. */
11cd3bed 4080 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4081 {
4082 if (bb->aux == NULL && bb != forwarder)
4083 {
4084 bb->aux = forwarder;
4085 break;
4086 }
4087 }
4088}
4089\f
4090/* Return true in case it is possible to duplicate the basic block BB. */
4091
4092static bool
4093cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4094{
4095 /* Do not attempt to duplicate tablejumps, as we need to unshare
4096 the dispatch table. This is difficult to do, as the instructions
4097 computing jump destination may be hoisted outside the basic block. */
4098 if (tablejump_p (BB_END (bb), NULL, NULL))
4099 return false;
4100
4101 /* Do not duplicate blocks containing insns that can't be copied. */
4102 if (targetm.cannot_copy_insn_p)
4103 {
8879d71c 4104 rtx_insn *insn = BB_HEAD (bb);
78bde837
SB
4105 while (1)
4106 {
4107 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4108 return false;
4109 if (insn == BB_END (bb))
4110 break;
4111 insn = NEXT_INSN (insn);
4112 }
4113 }
4114
4115 return true;
4116}
4117
a90c614a 4118rtx_insn *
5d34b050 4119duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
78bde837 4120{
5d34b050 4121 rtx_insn *insn, *next, *copy;
66e8df53 4122 rtx_note *last;
78bde837
SB
4123
4124 /* Avoid updating of boundaries of previous basic block. The
4125 note will get removed from insn stream in fixup. */
4126 last = emit_note (NOTE_INSN_DELETED);
4127
4128 /* Create copy at the end of INSN chain. The chain will
4129 be reordered later. */
4130 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4131 {
4132 switch (GET_CODE (insn))
4133 {
4134 case DEBUG_INSN:
4135 /* Don't duplicate label debug insns. */
4136 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4137 break;
4138 /* FALLTHRU */
4139 case INSN:
4140 case CALL_INSN:
4141 case JUMP_INSN:
78bde837
SB
4142 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4143 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4144 && ANY_RETURN_P (JUMP_LABEL (insn)))
4145 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4146 maybe_copy_prologue_epilogue_insn (insn, copy);
4147 break;
4148
39718607
SB
4149 case JUMP_TABLE_DATA:
4150 /* Avoid copying of dispatch tables. We never duplicate
4151 tablejumps, so this can hit only in case the table got
4152 moved far from original jump.
4153 Avoid copying following barrier as well if any
4154 (and debug insns in between). */
4155 for (next = NEXT_INSN (insn);
4156 next != NEXT_INSN (to);
4157 next = NEXT_INSN (next))
4158 if (!DEBUG_INSN_P (next))
4159 break;
4160 if (next != NEXT_INSN (to) && BARRIER_P (next))
4161 insn = next;
4162 break;
4163
78bde837
SB
4164 case CODE_LABEL:
4165 break;
4166
4167 case BARRIER:
4168 emit_barrier ();
4169 break;
4170
4171 case NOTE:
4172 switch (NOTE_KIND (insn))
4173 {
4174 /* In case prologue is empty and function contain label
4175 in first BB, we may want to copy the block. */
4176 case NOTE_INSN_PROLOGUE_END:
4177
4178 case NOTE_INSN_DELETED:
4179 case NOTE_INSN_DELETED_LABEL:
4180 case NOTE_INSN_DELETED_DEBUG_LABEL:
4181 /* No problem to strip these. */
4182 case NOTE_INSN_FUNCTION_BEG:
4183 /* There is always just single entry to function. */
4184 case NOTE_INSN_BASIC_BLOCK:
3371a64f
TJ
4185 /* We should only switch text sections once. */
4186 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
78bde837
SB
4187 break;
4188
4189 case NOTE_INSN_EPILOGUE_BEG:
d33606c3 4190 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
66e8df53 4191 emit_note_copy (as_a <rtx_note *> (insn));
78bde837
SB
4192 break;
4193
4194 default:
4195 /* All other notes should have already been eliminated. */
4196 gcc_unreachable ();
4197 }
4198 break;
4199 default:
4200 gcc_unreachable ();
4201 }
4202 }
4203 insn = NEXT_INSN (last);
4204 delete_insn (last);
5d34b050 4205 return insn;
78bde837
SB
4206}
4207
/* Create a duplicate of the basic block BB.  The new block is placed
   just before the exit block; its header and footer insn chains (used
   by cfglayout mode) are duplicated as well.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx_insn *insn;
  basic_block new_bb;

  /* Copy the main insn chain and build the new block from the copy.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Keep the new block in the same hot/cold partition as BB.  */
  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Find the last insn of the header chain, duplicate the whole
	 chain, then unlink the copy from the main stream and attach it
	 to the new block.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Likewise for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4244
4245\f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups.  As an
     example, if edge forwarding performed when optimizing the cfg
     layout required moving a block from the hot to the cold
     section.  This would create an illegal partitioning unless some
     manual fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);

  initialize_original_copy_tables ();

  /* Switch the cfg hooks to the cfglayout variants before any
     manipulation happens.  */
  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4280
4281/* Splits superblocks. */
4282void
4283break_superblocks (void)
4284{
78bde837
SB
4285 bool need = false;
4286 basic_block bb;
4287
7ba9e72d 4288 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
f61e445a 4289 bitmap_clear (superblocks);
78bde837 4290
11cd3bed 4291 FOR_EACH_BB_FN (bb, cfun)
78bde837
SB
4292 if (bb->flags & BB_SUPERBLOCK)
4293 {
4294 bb->flags &= ~BB_SUPERBLOCK;
d7c028c0 4295 bitmap_set_bit (superblocks, bb->index);
78bde837
SB
4296 need = true;
4297 }
4298
4299 if (need)
4300 {
4301 rebuild_jump_labels (get_insns ());
4302 find_many_sub_basic_blocks (superblocks);
4303 }
78bde837
SB
4304}
4305
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  checking_verify_flow_info ();
  free_dominance_info (CDI_DOMINATORS);
  /* Make sure the exit block has a single fallthru predecessor before
     leaving cfglayout mode.  */
  force_one_exit_fallthru ();
  /* Switch back to the ordinary RTL cfg hooks.  */
  rtl_register_cfg_hooks ();
  /* When the target has no epilogue insn, the fallthru-to-exit block
     must end up last in the chain.  */
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  /* Reorder the insn stream to match the ->aux block sequence.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
4327
ba5e9aca 4328
9ee634e3 4329/* Same as split_block but update cfg_layout structures. */
f470c378
ZD
4330
4331static basic_block
d329e058 4332cfg_layout_split_block (basic_block bb, void *insnp)
9ee634e3 4333{
ae50c0cb 4334 rtx insn = (rtx) insnp;
f470c378 4335 basic_block new_bb = rtl_split_block (bb, insn);
9ee634e3 4336
d8ce2eae
DM
4337 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4338 BB_FOOTER (bb) = NULL;
9ee634e3 4339
f470c378 4340 return new_bb;
9ee634e3
JH
4341}
4342
/* Redirect Edge E to DEST.  Return the redirected edge on success, or
   NULL when the edge cannot be redirected (abnormal-call and EH edges).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal-call and EH edges cannot be redirected at all.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already pointing at DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* A simple fallthru edge out of the entry block only needs the
     successor pointer updated.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
4425
4426/* Simple wrapper as we always can redirect fallthru edges. */
4427static basic_block
d329e058 4428cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
9ee634e3 4429{
341c100f
NS
4430 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4431
4432 gcc_assert (redirected);
9ee634e3
JH
4433 return NULL;
4434}
4435
/* Same as delete_basic_block but update cfg_layout structures.
   The block's header and footer insns are first spliced back into the
   main insn stream; whatever insns survive the deletion are re-attached
   as the header of the next block (or the function footer).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  /* Splice the header chain into the stream just before BB_HEAD.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* Drop barriers from the footer; stop at the first label, which
	 marks material that must be kept.  */
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      /* Splice the remaining footer after BB_END.  */
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Decide where surviving insns will be re-attached: the next block's
     header, or the function footer when BB precedes the exit block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If anything in the spliced range survived deletion, move it to the
     chosen attachment point.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4519
beb235f8 4520/* Return true when blocks A and B can be safely merged. */
b48d0358 4521
bc35512f 4522static bool
b48d0358 4523cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
bc35512f 4524{
750054a2
CT
4525 /* If we are partitioning hot/cold basic blocks, we don't want to
4526 mess up unconditional or indirect jumps that cross between hot
076c7ab8
ZW
4527 and cold sections.
4528
8e8d5162 4529 Basic block partitioning may result in some jumps that appear to
c22cacf3
MS
4530 be optimizable (or blocks that appear to be mergeable), but which really
4531 must be left untouched (they are required to make it safely across
4532 partition boundaries). See the comments at the top of
8e8d5162
CT
4533 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4534
87c8b4be 4535 if (BB_PARTITION (a) != BB_PARTITION (b))
076c7ab8 4536 return false;
750054a2 4537
7d776ee2
RG
4538 /* Protect the loop latches. */
4539 if (current_loops && b->loop_father->latch == b)
4540 return false;
4541
894a84b5
BS
4542 /* If we would end up moving B's instructions, make sure it doesn't fall
4543 through into the exit block, since we cannot recover from a fallthrough
4544 edge into the exit block occurring in the middle of a function. */
4545 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4546 {
4547 edge e = find_fallthru_edge (b->succs);
fefa31b5 4548 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
894a84b5
BS
4549 return false;
4550 }
4551
bc35512f 4552 /* There must be exactly one edge in between the blocks. */
c5cbcccf
ZD
4553 return (single_succ_p (a)
4554 && single_succ (a) == b
4555 && single_pred_p (b) == 1
628f6a4e 4556 && a != b
bc35512f 4557 /* Must be simple edge. */
c5cbcccf 4558 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
fefa31b5
DM
4559 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4560 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
49ea3702
SB
4561 /* If the jump insn has side effects, we can't kill the edge.
4562 When not optimizing, try_redirect_by_replacing_jump will
4563 not allow us to redirect an edge by replacing a table jump. */
4b4bf941 4564 && (!JUMP_P (BB_END (a))
49ea3702 4565 || ((!optimize || reload_completed)
a813c111 4566 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
bc35512f
JH
4567}
4568
/* Merge block A and B.  The blocks must be mergeable
   (cfg_layout_can_merge_blocks_p must hold).  B's insns are appended to
   A and B is deleted.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  /* Delete B's now-redundant basic-block note.  */
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4671}
4672
4673/* Split edge E. */
f470c378 4674
bc35512f
JH
4675static basic_block
4676cfg_layout_split_edge (edge e)
4677{
bc35512f 4678 basic_block new_bb =
fefa31b5 4679 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
a813c111 4680 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
bc35512f
JH
4681 NULL_RTX, e->src);
4682
fefa31b5 4683 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3e7eb734
JJ
4684 BB_COPY_PARTITION (new_bb, e->src);
4685 else
4686 BB_COPY_PARTITION (new_bb, e->dest);
a9b2ee88 4687 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
bc35512f
JH
4688 redirect_edge_and_branch_force (e, new_bb);
4689
4690 return new_bb;
4691}
4692
f470c378
ZD
4693/* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4694
4695static void
4696rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4697{
4698}
4699
df92c640
SB
4700/* Return true if BB contains only labels or non-executable
4701 instructions. */
4702
4703static bool
4704rtl_block_empty_p (basic_block bb)
4705{
8879d71c 4706 rtx_insn *insn;
df92c640 4707
fefa31b5
DM
4708 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4709 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
df92c640
SB
4710 return true;
4711
4712 FOR_BB_INSNS (bb, insn)
4713 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4714 return false;
4715
4716 return true;
4717}
4718
/* Split a basic block if it ends with a conditional branch and if
   the other part of the block is not empty.  Returns the new block
   holding the branch, or NULL when no split was done.  */

static basic_block
rtl_split_block_before_cond_jump (basic_block bb)
{
  rtx_insn *insn;
  rtx_insn *split_point = NULL;	/* Insn just before the last condjump.  */
  rtx_insn *last = NULL;
  bool found_code = false;	/* Saw a real (non-jump) insn.  */

  FOR_BB_INSNS (bb, insn)
    {
      if (any_condjump_p (insn))
	split_point = last;
      else if (NONDEBUG_INSN_P (insn))
	found_code = true;
      last = insn;
    }

  /* Split only when the block has real code before the conditional
     jump; otherwise there is nothing worth separating.  */
  if (found_code && split_point)
    return split_block (bb, split_point)->dest;
  else
    return NULL;
}
6de9cd9a
DN
4746/* Return 1 if BB ends with a call, possibly followed by some
4747 instructions that must stay with the call, 0 otherwise. */
4748
4749static bool
b48d0358 4750rtl_block_ends_with_call_p (basic_block bb)
6de9cd9a 4751{
8879d71c 4752 rtx_insn *insn = BB_END (bb);
6de9cd9a 4753
4b4bf941 4754 while (!CALL_P (insn)
6de9cd9a 4755 && insn != BB_HEAD (bb)
92ddef69 4756 && (keep_with_call_p (insn)
b5b8b0ac
AO
4757 || NOTE_P (insn)
4758 || DEBUG_INSN_P (insn)))
6de9cd9a 4759 insn = PREV_INSN (insn);
4b4bf941 4760 return (CALL_P (insn));
6de9cd9a
DN
4761}
4762
4763/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4764
4765static bool
9678086d 4766rtl_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a
DN
4767{
4768 return any_condjump_p (BB_END (bb));
4769}
4770
4771/* Return true if we need to add fake edge to exit.
4772 Helper function for rtl_flow_call_edges_add. */
4773
4774static bool
8879d71c 4775need_fake_edge_p (const rtx_insn *insn)
6de9cd9a
DN
4776{
4777 if (!INSN_P (insn))
4778 return false;
4779
4b4bf941 4780 if ((CALL_P (insn)
6de9cd9a
DN
4781 && !SIBLING_CALL_P (insn)
4782 && !find_reg_note (insn, REG_NORETURN, NULL)
becfd6e5 4783 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
6de9cd9a
DN
4784 return true;
4785
4786 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4787 && MEM_VOLATILE_P (PATTERN (insn)))
4788 || (GET_CODE (PATTERN (insn)) == PARALLEL
4789 && asm_noperands (insn) != -1
4790 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4791 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4792}
4793
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for a function with only the fixed entry/exit blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The dummy (use (const0_rtx)) forces the fallthru edge to
		 be split so the fake edge below stays distinct from it.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Blocks may have been deleted; the index can be unused.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Scan the block backwards; PREV_INSN is captured up front because
	 splitting below may rewire the chain around INSN.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      /* The fake edge is never taken; give it the corresponding
		 guessed profile so the counts stay consistent.  */
	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	      ne->count = profile_count::guessed_zero ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
1cb7dfc3 4921/* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
315682fb 4922 the conditional branch target, SECOND_HEAD should be the fall-thru
1cb7dfc3
MH
4923 there is no need to handle this here the loop versioning code handles
4924 this. the reason for SECON_HEAD is that it is needed for condition
4925 in trees, and this should be of the same type since it is a hook. */
4926static void
4927rtl_lv_add_condition_to_bb (basic_block first_head ,
c22cacf3
MS
4928 basic_block second_head ATTRIBUTE_UNUSED,
4929 basic_block cond_bb, void *comp_rtx)
1cb7dfc3 4930{
1476d1bd 4931 rtx_code_label *label;
8879d71c 4932 rtx_insn *seq, *jump;
1cb7dfc3
MH
4933 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4934 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4935 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
ef4bddc2 4936 machine_mode mode;
1cb7dfc3
MH
4937
4938
4939 label = block_label (first_head);
4940 mode = GET_MODE (op0);
4941 if (mode == VOIDmode)
4942 mode = GET_MODE (op1);
4943
4944 start_sequence ();
4945 op0 = force_operand (op0, NULL_RTX);
4946 op1 = force_operand (op1, NULL_RTX);
357067f2
JH
4947 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
4948 profile_probability::uninitialized ());
1cb7dfc3
MH
4949 jump = get_last_insn ();
4950 JUMP_LABEL (jump) = label;
4951 LABEL_NUSES (label)++;
4952 seq = get_insns ();
4953 end_sequence ();
4954
2b9c63a2 4955 /* Add the new cond, in the new head. */
c3284718 4956 emit_insn_after (seq, BB_END (cond_bb));
1cb7dfc3
MH
4957}
4958
4959
4960/* Given a block B with unconditional branch at its end, get the
4961 store the return the branch edge and the fall-thru edge in
4962 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4963static void
4964rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4965 edge *fallthru_edge)
4966{
4967 edge e = EDGE_SUCC (b, 0);
4968
4969 if (e->flags & EDGE_FALLTHRU)
4970 {
4971 *fallthru_edge = e;
4972 *branch_edge = EDGE_SUCC (b, 1);
4973 }
4974 else
4975 {
4976 *branch_edge = e;
4977 *fallthru_edge = EDGE_SUCC (b, 1);
4978 }
4979}
4980
5e2d947c
JH
4981void
4982init_rtl_bb_info (basic_block bb)
4983{
bcc708fc
MM
4984 gcc_assert (!bb->il.x.rtl);
4985 bb->il.x.head_ = NULL;
766090c2 4986 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5e2d947c
JH
4987}
4988
14fa2cc0
ZD
4989/* Returns true if it is possible to remove edge E by redirecting
4990 it to the destination of the other edge from E->src. */
4991
4992static bool
9678086d 4993rtl_can_remove_branch_p (const_edge e)
14fa2cc0 4994{
9678086d
KG
4995 const_basic_block src = e->src;
4996 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
8879d71c
DM
4997 const rtx_insn *insn = BB_END (src);
4998 rtx set;
14fa2cc0
ZD
4999
5000 /* The conditions are taken from try_redirect_by_replacing_jump. */
fefa31b5 5001 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
14fa2cc0
ZD
5002 return false;
5003
5004 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5005 return false;
5006
3371a64f 5007 if (BB_PARTITION (src) != BB_PARTITION (target))
14fa2cc0
ZD
5008 return false;
5009
5010 if (!onlyjump_p (insn)
5011 || tablejump_p (insn, NULL, NULL))
5012 return false;
5013
5014 set = single_set (insn);
5015 if (!set || side_effects_p (set))
5016 return false;
5017
5018 return true;
5019}
5020
ffe14686
AM
5021static basic_block
5022rtl_duplicate_bb (basic_block bb)
5023{
5024 bb = cfg_layout_duplicate_bb (bb);
5025 bb->aux = NULL;
5026 return bb;
5027}
5028
aa4723d7
SB
5029/* Do book-keeping of basic block BB for the profile consistency checker.
5030 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5031 then do post-pass accounting. Store the counting in RECORD. */
5032static void
5033rtl_account_profile_record (basic_block bb, int after_pass,
5034 struct profile_record *record)
5035{
8879d71c 5036 rtx_insn *insn;
aa4723d7
SB
5037 FOR_BB_INSNS (bb, insn)
5038 if (INSN_P (insn))
5039 {
5040 record->size[after_pass]
5041 += insn_rtx_cost (PATTERN (insn), false);
3995f3a2 5042 if (bb->count.initialized_p ())
aa4723d7 5043 record->time[after_pass]
3995f3a2 5044 += insn_rtx_cost (PATTERN (insn), true) * bb->count.to_gcov_type ();
0a6a6ac9 5045 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
aa4723d7
SB
5046 record->time[after_pass]
5047 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5048 }
5049}
5050
9ee634e3
JH
5051/* Implementation of CFG manipulation for linearized RTL. */
5052struct cfg_hooks rtl_cfg_hooks = {
f470c378 5053 "rtl",
9ee634e3 5054 rtl_verify_flow_info,
10e9fecc 5055 rtl_dump_bb,
2c895bd1 5056 rtl_dump_bb_for_graph,
bc35512f 5057 rtl_create_basic_block,
9ee634e3
JH
5058 rtl_redirect_edge_and_branch,
5059 rtl_redirect_edge_and_branch_force,
14fa2cc0 5060 rtl_can_remove_branch_p,
9ee634e3
JH
5061 rtl_delete_block,
5062 rtl_split_block,
f470c378 5063 rtl_move_block_after,
bc35512f
JH
5064 rtl_can_merge_blocks, /* can_merge_blocks_p */
5065 rtl_merge_blocks,
6de9cd9a
DN
5066 rtl_predict_edge,
5067 rtl_predicted_by_p,
ffe14686
AM
5068 cfg_layout_can_duplicate_bb_p,
5069 rtl_duplicate_bb,
f470c378
ZD
5070 rtl_split_edge,
5071 rtl_make_forwarder_block,
6de9cd9a 5072 rtl_tidy_fallthru_edge,
cf103ca4 5073 rtl_force_nonfallthru,
6de9cd9a
DN
5074 rtl_block_ends_with_call_p,
5075 rtl_block_ends_with_condjump_p,
d9d4706f
KH
5076 rtl_flow_call_edges_add,
5077 NULL, /* execute_on_growing_pred */
1cb7dfc3
MH
5078 NULL, /* execute_on_shrinking_pred */
5079 NULL, /* duplicate loop for trees */
5080 NULL, /* lv_add_condition_to_bb */
5081 NULL, /* lv_adjust_loop_header_phi*/
5082 NULL, /* extract_cond_bb_edges */
df92c640
SB
5083 NULL, /* flush_pending_stmts */
5084 rtl_block_empty_p, /* block_empty_p */
5085 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
aa4723d7 5086 rtl_account_profile_record,
9ee634e3
JH
5087};
5088
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge: not needed, fallthru blocks need not be adjacent */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
78bde837
SB
5130
5131#include "gt-cfgrtl.h"