]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfgrtl.c
Eliminate FOR_ALL_BB macro.
[thirdparty/gcc.git] / gcc / cfgrtl.c
CommitLineData
b36d64df 1/* Control flow graph manipulation code for GNU compiler.
711789cc 2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
b36d64df 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
b36d64df 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
b36d64df 19
5cc577b6 20/* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
b36d64df 22
23 Available functionality:
c60fa3a7 24 - Basic CFG/RTL manipulation API documented in cfghooks.h
5cc577b6 25 - CFG-aware instruction chain manipulation
b36d64df 26 delete_insn, delete_insn_chain
c60fa3a7 27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
54f21e20 31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
c60fa3a7 33
34 Functions not supposed for generic use:
5cc577b6 35 - Infrastructure to determine quickly basic block for insn
b36d64df 36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
5cc577b6 37 - Edge redirection with updating and optimizing of insn chain
c60fa3a7 38 block_label, tidy_fallthru_edge, force_nonfallthru */
b36d64df 39\f
40#include "config.h"
41#include "system.h"
805e22b2 42#include "coretypes.h"
43#include "tm.h"
b36d64df 44#include "tree.h"
b36d64df 45#include "hard-reg-set.h"
46#include "basic-block.h"
aa78dca5 47#include "bb-reorder.h"
b36d64df 48#include "regs.h"
49#include "flags.h"
b36d64df 50#include "function.h"
51#include "except.h"
d7091a76 52#include "rtl-error.h"
b36d64df 53#include "tm_p.h"
54#include "obstack.h"
225e5f6f 55#include "insn-attr.h"
26fb1781 56#include "insn-config.h"
804e497b 57#include "expr.h"
065efcb1 58#include "target.h"
218e3e4e 59#include "common/common-target.h"
c50ae675 60#include "cfgloop.h"
e0dde8f8 61#include "ggc.h"
77fce4cd 62#include "tree-pass.h"
3072d30e 63#include "df.h"
b36d64df 64
23a070f3 65/* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
67static GTY(()) rtx cfg_layout_function_footer;
68static GTY(()) rtx cfg_layout_function_header;
69
70static rtx skip_insns_after_block (basic_block);
71static void record_effective_endpoints (void);
72static rtx label_for_bb (basic_block);
73static void fixup_reorder_chain (void);
74
75void verify_insn_chain (void);
76static void fixup_fallthru_exit_predecessor (void);
5493cb9a 77static int can_delete_note_p (const_rtx);
78static int can_delete_label_p (const_rtx);
4c9e08a4 79static basic_block rtl_split_edge (edge);
5f5d4cd1 80static bool rtl_move_block_after (basic_block, basic_block);
4c9e08a4 81static int rtl_verify_flow_info (void);
5f5d4cd1 82static basic_block cfg_layout_split_block (basic_block, void *);
4ee9c684 83static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
4c9e08a4 84static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
85static void cfg_layout_delete_block (basic_block);
86static void rtl_delete_block (basic_block);
87static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
4ee9c684 88static edge rtl_redirect_edge_and_branch (edge, basic_block);
5f5d4cd1 89static basic_block rtl_split_block (basic_block, void *);
5147ec07 90static void rtl_dump_bb (FILE *, basic_block, int, int);
4c9e08a4 91static int rtl_verify_flow_info_1 (void);
5f5d4cd1 92static void rtl_make_forwarder_block (edge);
b36d64df 93\f
94/* Return true if NOTE is not one of the ones that must be kept paired,
5cc577b6 95 so that we may simply delete it. */
b36d64df 96
97static int
5493cb9a 98can_delete_note_p (const_rtx note)
b36d64df 99{
25e880b1 100 switch (NOTE_KIND (note))
101 {
102 case NOTE_INSN_DELETED:
103 case NOTE_INSN_BASIC_BLOCK:
104 case NOTE_INSN_EPILOGUE_BEG:
105 return true;
106
107 default:
108 return false;
109 }
b36d64df 110}
111
112/* True if a given label can be deleted. */
113
114static int
5493cb9a 115can_delete_label_p (const_rtx label)
b36d64df 116{
5cc577b6 117 return (!LABEL_PRESERVE_P (label)
118 /* User declared labels must be preserved. */
119 && LABEL_NAME (label) == 0
2f138c1c 120 && !in_expr_list_p (forced_labels, label));
b36d64df 121}
122
/* Delete INSN by patching it out.

   Labels that cannot be removed outright are demoted to
   NOTE_INSN_DELETED_LABEL notes instead of being unlinked, since other
   RTL (constant pool entries, variables) may still reference them.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  /* Rewrite the label in place into a deleted-label note,
	     remembering its name for debug output.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      /* Drop one use for each label the jump table targeted.  */
      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
219
fb20d6fa 220/* Like delete_insn but also purge dead edges from BB. */
ebc94641 221
a2b85e40 222void
4c9e08a4 223delete_insn_and_edges (rtx insn)
fb20d6fa 224{
fb20d6fa 225 bool purge = false;
226
ab87d1bc 227 if (INSN_P (insn)
fb20d6fa 228 && BLOCK_FOR_INSN (insn)
5496dbfc 229 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
fb20d6fa 230 purge = true;
a2b85e40 231 delete_insn (insn);
fb20d6fa 232 if (purge)
233 purge_dead_edges (BLOCK_FOR_INSN (insn));
fb20d6fa 234}
235
b36d64df 236/* Unlink a chain of insns between START and FINISH, leaving notes
39257eb5 237 that must be paired. If CLEAR_BB is true, we set bb field for
238 insns that cannot be removed to NULL. */
b36d64df 239
240void
39257eb5 241delete_insn_chain (rtx start, rtx finish, bool clear_bb)
b36d64df 242{
a2b85e40 243 rtx prev, current;
b36d64df 244
5cc577b6 245 /* Unchain the insns one by one. It would be quicker to delete all of these
246 with a single unchaining, rather than one at a time, but we need to keep
247 the NOTE's. */
a2b85e40 248 current = finish;
b36d64df 249 while (1)
250 {
a2b85e40 251 prev = PREV_INSN (current);
252 if (NOTE_P (current) && !can_delete_note_p (current))
b36d64df 253 ;
254 else
a2b85e40 255 delete_insn (current);
b36d64df 256
a2b85e40 257 if (clear_bb && !INSN_DELETED_P (current))
258 set_block_for_insn (current, NULL);
39257eb5 259
a2b85e40 260 if (current == start)
b36d64df 261 break;
a2b85e40 262 current = prev;
b36d64df 263 }
264}
265\f
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      /* NOTE(review): this local deliberately shadows the AFTER
	 parameter; it is the insn the note is re-threaded after.  */
      rtx after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty range: the note itself becomes the whole block.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* Keep the label first; the block note goes right after it.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
345
5cc577b6 346/* Create new basic block consisting of instructions in between HEAD and END
0a55d497 347 and place it to the BB chain after block AFTER. END can be NULL to
348 create a new empty basic block before HEAD. Both END and HEAD can be
349 NULL to create basic block at the end of INSN chain. */
b36d64df 350
c60fa3a7 351static basic_block
352rtl_create_basic_block (void *headp, void *endp, basic_block after)
b36d64df 353{
f780cc25 354 rtx head = (rtx) headp, end = (rtx) endp;
b36d64df 355 basic_block bb;
b3d6de89 356
743ff5bb 357 /* Grow the basic block array if needed. */
fe672ac0 358 if ((size_t) last_basic_block_for_fn (cfun)
359 >= basic_block_info_for_fn (cfun)->length ())
743ff5bb 360 {
fe672ac0 361 size_t new_size =
362 (last_basic_block_for_fn (cfun)
363 + (last_basic_block_for_fn (cfun) + 3) / 4);
9ca41bd1 364 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
743ff5bb 365 }
b3d6de89 366
a28770e1 367 n_basic_blocks_for_fn (cfun)++;
b36d64df 368
f23d9a22 369 bb = create_basic_block_structure (head, end, NULL, after);
b36d64df 370 bb->aux = NULL;
371 return bb;
372}
c60fa3a7 373
374static basic_block
375cfg_layout_create_basic_block (void *head, void *end, basic_block after)
376{
377 basic_block newbb = rtl_create_basic_block (head, end, after);
378
c60fa3a7 379 return newbb;
380}
b36d64df 381\f
382/* Delete the insns in a (non-live) block. We physically delete every
383 non-deleted-note insn, and update the flow graph appropriately.
384
385 Return nonzero if we deleted an exception handler. */
386
387/* ??? Preserving all such notes strikes me as wrong. It would be nice
388 to post-process the stream to remove empty blocks, loops, ranges, etc. */
389
5f704f9f 390static void
4c9e08a4 391rtl_delete_block (basic_block b)
b36d64df 392{
5d65a39c 393 rtx insn, end;
b36d64df 394
395 /* If the head of this block is a CODE_LABEL, then it might be the
57548910 396 label for an exception handler which can't be reached. We need
397 to remove the label from the exception_handler_label list. */
5496dbfc 398 insn = BB_HEAD (b);
b36d64df 399
5d65a39c 400 end = get_last_bb_insn (b);
b36d64df 401
402 /* Selectively delete the entire chain. */
5496dbfc 403 BB_HEAD (b) = NULL;
39257eb5 404 delete_insn_chain (insn, end, true);
405
3072d30e 406
407 if (dump_file)
408 fprintf (dump_file, "deleting block %d\n", b->index);
409 df_bb_delete (b->index);
b36d64df 410}
411\f
f23d9a22 412/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
b36d64df 413
414void
4c9e08a4 415compute_bb_for_insn (void)
b36d64df 416{
4c26117a 417 basic_block bb;
b36d64df 418
fc00614f 419 FOR_EACH_BB_FN (bb, cfun)
b36d64df 420 {
5496dbfc 421 rtx end = BB_END (bb);
5cc577b6 422 rtx insn;
b36d64df 423
5496dbfc 424 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
b36d64df 425 {
ab87d1bc 426 BLOCK_FOR_INSN (insn) = bb;
b36d64df 427 if (insn == end)
428 break;
b36d64df 429 }
430 }
431}
432
433/* Release the basic_block_for_insn array. */
434
2a1990e9 435unsigned int
4c9e08a4 436free_bb_for_insn (void)
b36d64df 437{
ab87d1bc 438 rtx insn;
439 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6d7dc5b9 440 if (!BARRIER_P (insn))
ab87d1bc 441 BLOCK_FOR_INSN (insn) = NULL;
2a1990e9 442 return 0;
b36d64df 443}
444
/* Finalization for the free_cfg pass: tear down the per-insn block
   mapping once the CFG is no longer needed.  Always returns 0.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  /* Record the hot/cold section boundary before the CFG goes away, so
     later emission still knows where the partition splits.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}
464
namespace {

/* Pass metadata for the *free_cfg pass: an RTL pass with no gate that
   destroys PROP_cfg (the CFG is gone once it runs).  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapping rest_of_pass_free_cfg for the pass manager.  */
class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_pass_free_cfg (); }

}; // class pass_free_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
501
a0fee14a 502/* Return RTX to emit after when we want to emit code on the entry of function. */
503rtx
504entry_of_function (void)
505{
a28770e1 506 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
34154e27 507 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
a0fee14a 508}
509
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);

  /* The entry block's successor edge must be a fallthru; inserting on
     that edge guarantees the insn runs exactly once, before the body.  */
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
522
f41d4683 523/* Update BLOCK_FOR_INSN of insns between BEGIN and END
48e1416a 524 (or BARRIER if found) and notify df of the bb change.
f41d4683 525 The insn chain range is inclusive
526 (i.e. both BEGIN and END will be updated. */
b36d64df 527
f41d4683 528static void
529update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
b36d64df 530{
531 rtx insn;
532
f41d4683 533 end = NEXT_INSN (end);
534 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
a2bdd643 535 if (!BARRIER_P (insn))
536 df_insn_change_bb (insn, bb);
b36d64df 537}
f41d4683 538
539/* Update BLOCK_FOR_INSN of insns in BB to BB,
540 and notify df of the change. */
541
542void
543update_bb_for_insn (basic_block bb)
544{
545 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
546}
547
4a020a8c 548\f
549/* Like active_insn_p, except keep the return value clobber around
550 even after reload. */
551
552static bool
553flow_active_insn_p (const_rtx insn)
554{
555 if (active_insn_p (insn))
556 return true;
557
558 /* A clobber of the function return value exists for buggy
559 programs that fail to return a value. Its effect is to
560 keep the return value from being live across the entire
561 function. If we allow it to be skipped, we introduce the
562 possibility for register lifetime confusion. */
563 if (GET_CODE (PATTERN (insn)) == CLOBBER
564 && REG_P (XEXP (PATTERN (insn), 0))
565 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
566 return true;
567
568 return false;
569}
570
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx insn;

  /* The fixed entry/exit blocks and any block with multiple successors
     can never be pure forwarders.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb))
    return false;

  /* Scan everything up to (but not including) the last insn; any
     active insn means the block has an effect.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* INSN is now BB_END: the block forwards iff its last insn is
     inactive, a simple jump, or not an insn at all.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
591
592/* Likewise, but protect loop latches, headers and preheaders. */
593/* FIXME: Make this a cfg hook. */
594
595bool
596forwarder_block_p (const_basic_block bb)
597{
598 if (!contains_no_active_insn_p (bb))
599 return false;
600
4a020a8c 601 /* Protect loop latches, headers and preheaders. */
602 if (current_loops)
603 {
604 basic_block dest;
605 if (bb->loop_father->header == bb)
606 return false;
607 dest = EDGE_SUCC (bb, 0)->dest;
608 if (dest->loop_father->header == dest)
609 return false;
610 }
611
bbdfcf34 612 return true;
4a020a8c 613}
614
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2;
  edge e;
  edge_iterator ei;

  /* Anything can "fall through" to the exit block.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fallthru requires the blocks to be physically adjacent.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fallthru edge to the exit block means SRC's fallthru
     slot is already taken.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* Fall through only if nothing active sits between the two blocks.  */
  return next_active_insn (insn) == insn2;
}
646
647/* Return nonzero if we could reach target from src by falling through,
648 if the target was made adjacent. If we already have a fall-through
649 edge to the exit block, we can't do that. */
650static bool
651could_fall_through (basic_block src, basic_block target)
652{
653 edge e;
654 edge_iterator ei;
655
34154e27 656 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
4a020a8c 657 return true;
658 FOR_EACH_EDGE (e, ei, src->succs)
34154e27 659 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
4a020a8c 660 && e->flags & EDGE_FALLTHRU)
661 return 0;
662 return true;
663}
b36d64df 664\f
018f0595 665/* Return the NOTE_INSN_BASIC_BLOCK of BB. */
666rtx
667bb_note (basic_block bb)
668{
669 rtx note;
670
671 note = BB_HEAD (bb);
672 if (LABEL_P (note))
673 note = NEXT_INSN (note);
674
675 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
676 return note;
677}
678
3072d30e 679/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
680 note associated with the BLOCK. */
681
682static rtx
683first_insn_after_basic_block_note (basic_block block)
684{
685 rtx insn;
686
687 /* Get the first instruction in the block. */
688 insn = BB_HEAD (block);
689
690 if (insn == NULL_RTX)
691 return NULL_RTX;
692 if (LABEL_P (insn))
693 insn = NEXT_INSN (insn);
694 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
695
696 return NEXT_INSN (insn);
697}
698
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  /* A NULL split point means "split right after the block note".  */
  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
760
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  /* An unknown locus is never worth preserving.  */
  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  /* If the last located insn of A already carries the locus, the edge
     is not the unique holder.  */
  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      /* Likewise if B's first located insn carries the locus.  */
      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
797
798/* If the single edge between blocks A and B is the only place in RTL which
799 holds some unique locus, emit a nop with that locus between the blocks. */
800
801static void
802emit_nop_for_unique_locus_between (basic_block a, basic_block b)
803{
804 if (!unique_locus_on_edge_between_p (a, b))
805 return;
806
807 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
5169661d 808 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
8ddad41d 809}
810
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Trailing debug insns of B are handled separately; back b_end up
     past them, remembering where the debug run starts.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
c60fa3a7 932
3072d30e 933
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
b36d64df 972\f
5cc577b6 973/* Return the label in the head of basic block BLOCK. Create one if it doesn't
974 exist. */
b36d64df 975
976rtx
4c9e08a4 977block_label (basic_block block)
b36d64df 978{
34154e27 979 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
b36d64df 980 return NULL_RTX;
5cc577b6 981
6d7dc5b9 982 if (!LABEL_P (BB_HEAD (block)))
b36d64df 983 {
5496dbfc 984 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
b36d64df 985 }
5cc577b6 986
5496dbfc 987 return BB_HEAD (block);
b36d64df 988}
989
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.
	 (EDGE_SUCC (src, 0) == e) evaluates to 0 or 1, so this indexes
	 whichever of the two successor edges is not E.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* NOTE: this INSN deliberately shadows the outer jump insn;
	     it walks the cfglayout footer chain of SRC.  */
	  rtx insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  /* Unlink the barrier from the footer chain.  */
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* Redirection can only fail for the exit block, which has
	     no label to redirect to.  */
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* Make sure the new jump is followed by a barrier; move it in
	 front of a pre-existing one if necessary so intervening notes
	 end up inside the block.  */
      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Splice NEW_INSN out of the chain ...  */
	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ... and relink it immediately before BARRIER.  */
	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1167
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      /* A tablejump cannot target the exit block: there is no label.  */
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Rewrite every dispatch-table entry that pointed at OLD_LABEL,
	 keeping the label use counts in sync.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: rewrite each operand label that matches OLD_LABEL.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      /* Keep JUMP_LABEL and the REG_LABEL_TARGET notes consistent:
	 the note duplicating JUMP_LABEL is redundant and removed.  */
      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Retarget any REG_LABEL_OPERAND notes still naming OLD_LABEL.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1282
1283
1284/* Redirect edge representing branch of (un)conditional jump or tablejump,
1285 NULL on failure */
1286static edge
1287redirect_branch_edge (edge e, basic_block target)
1288{
1289 rtx old_label = BB_HEAD (e->dest);
1290 basic_block src = e->src;
1291 rtx insn = BB_END (src);
1292
1293 /* We can only redirect non-fallthru edges of jump insn. */
1294 if (e->flags & EDGE_FALLTHRU)
1295 return NULL;
1296 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1297 return NULL;
1298
1299 if (!currently_expanding_to_rtl)
1300 {
1301 if (!patch_jump_insn (insn, old_label, target))
1302 return NULL;
1303 }
1304 else
1305 /* When expanding this BB might actually contain multiple
1306 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1307 Redirect all of those that match our label. */
50b15e04 1308 FOR_BB_INSNS (src, insn)
a8dd994c 1309 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1310 return NULL;
b36d64df 1311
450d042a 1312 if (dump_file)
1313 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
b3d6de89 1314 e->src->index, e->dest->index, target->index);
5cc577b6 1315
b36d64df 1316 if (e->dest != target)
4ee9c684 1317 e = redirect_edge_succ_nodup (e, target);
3072d30e 1318
4ee9c684 1319 return e;
c60fa3a7 1320}
1321
aa78dca5 1322/* Called when edge E has been redirected to a new destination,
1323 in order to update the region crossing flag on the edge and
1324 jump. */
1325
1326static void
1327fixup_partition_crossing (edge e)
1328{
1329 rtx note;
1330
34154e27 1331 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1332 == EXIT_BLOCK_PTR_FOR_FN (cfun))
aa78dca5 1333 return;
1334 /* If we redirected an existing edge, it may already be marked
1335 crossing, even though the new src is missing a reg crossing note.
1336 But make sure reg crossing note doesn't already exist before
1337 inserting. */
1338 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1339 {
1340 e->flags |= EDGE_CROSSING;
1341 note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1342 if (JUMP_P (BB_END (e->src))
1343 && !note)
1344 add_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1345 }
1346 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1347 {
1348 e->flags &= ~EDGE_CROSSING;
1349 /* Remove the section crossing note from jump at end of
1350 src if it exists, and if no other successors are
1351 still crossing. */
1352 note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1353 if (note)
1354 {
1355 bool has_crossing_succ = false;
1356 edge e2;
1357 edge_iterator ei;
1358 FOR_EACH_EDGE (e2, ei, e->src->succs)
1359 {
1360 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1361 if (has_crossing_succ)
1362 break;
1363 }
1364 if (!has_crossing_succ)
1365 remove_note (BB_END (e->src), note);
1366 }
1367 }
1368}
1369
80adc5a6 1370/* Called when block BB has been reassigned to the cold partition,
1371 because it is now dominated by another cold block,
1372 to ensure that the region crossing attributes are updated. */
1373
1374static void
1375fixup_new_cold_bb (basic_block bb)
1376{
1377 edge e;
1378 edge_iterator ei;
1379
1380 /* This is called when a hot bb is found to now be dominated
1381 by a cold bb and therefore needs to become cold. Therefore,
1382 its preds will no longer be region crossing. Any non-dominating
1383 preds that were previously hot would also have become cold
1384 in the caller for the same region. Any preds that were previously
1385 region-crossing will be adjusted in fixup_partition_crossing. */
1386 FOR_EACH_EDGE (e, ei, bb->preds)
1387 {
1388 fixup_partition_crossing (e);
1389 }
1390
1391 /* Possibly need to make bb's successor edges region crossing,
1392 or remove stale region crossing. */
1393 FOR_EACH_EDGE (e, ei, bb->succs)
1394 {
1395 /* We can't have fall-through edges across partition boundaries.
1396 Note that force_nonfallthru will do any necessary partition
1397 boundary fixup by calling fixup_partition_crossing itself. */
1398 if ((e->flags & EDGE_FALLTHRU)
1399 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
34154e27 1400 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
80adc5a6 1401 force_nonfallthru (e);
1402 else
1403 fixup_partition_crossing (e);
1404 }
1405}
1406
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   on failure.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  /* Abnormal call and EH edges cannot be redirected.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  /* Prefer the cheap transformation: replace or remove the jump.  */
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  /* Otherwise patch the branch instruction itself.  */
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  /* Mark the source block dirty for dataflow and update the hot/cold
     region crossing state of the redirected edge.  */
  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
1446
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx barrier = emit_barrier_after (BB_END (bb));
  /* Only meaningful for the two RTL CFG modes.  */
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  /* In cfglayout mode barriers do not live in the insn stream; move the
     freshly emitted one into BB's footer chain instead.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    BB_FOOTER (bb) = unlink_insn_chain (barrier, barrier);
}
1458
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      /* NOTE: this NOTE deliberately shadows the outer declaration.  */
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      /* Split the branch probability/count between the redirected
	 branch edge B and the remaining edge E.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
	  /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget asm operand labels that named the old fallthru
	     destination; remember if any label already names TARGET.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  /* Keep JUMP_LABEL and REG_LABEL_TARGET/REG_LABEL_OPERAND notes
	     of the asm goto consistent with the retargeted labels.  */
	  rtx insn = BB_END (e->src), note;
	  rtx old_label = BB_HEAD (e->dest);
	  rtx new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Jumping to the exit block means emitting a (simple) return insn;
	 JUMP_LABEL selects which kind, and the target must support it.  */
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1704
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  /* Keep the current destination; NULL_RTX means no return-style jump
     is requested.  */
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
1714
1715/* Redirect edge even at the expense of creating new jump insn or
1716 basic block. Return new basic block if created, NULL otherwise.
a53ff4c1 1717 Conversion must be possible. */
b36d64df 1718
1026363d 1719static basic_block
4c9e08a4 1720rtl_redirect_edge_and_branch_force (edge e, basic_block target)
b36d64df 1721{
5cc577b6 1722 if (redirect_edge_and_branch (e, target)
1723 || e->dest == target)
b36d64df 1724 return NULL;
1725
1726 /* In case the edge redirection failed, try to force it to be non-fallthru
1727 and redirect newly created simplejump. */
3072d30e 1728 df_set_bb_dirty (e->src);
9cb2517e 1729 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
b36d64df 1730}
1731
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Back up before the jump so the deletion below removes it.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
b36d64df 1780\f
/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  /* Not implemented for plain RTL mode; report failure so callers can
     fall back.  */
  return false;
}
1789
aa78dca5 1790/* Locate the last bb in the same partition as START_BB. */
1791
1792static basic_block
1793last_bb_in_partition (basic_block start_bb)
1794{
1795 basic_block bb;
34154e27 1796 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
aa78dca5 1797 {
1798 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1799 return bb;
1800 }
efee62d1 1801 /* Return bb before the exit block. */
aa78dca5 1802 return bb->prev_bb;
1803}
1804
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* Splitting an edge out of the entry block: the new block goes
	     in front of the destination and inherits its partition.  */
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = NEXT_INSN (BB_END (after));
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1919
1920/* Queue instructions for insertion on an edge between two basic blocks.
1921 The new instructions and basic blocks (if any) will not appear in the
1922 CFG until commit_edge_insertions is called. */
1923
1924void
4c9e08a4 1925insert_insn_on_edge (rtx pattern, edge e)
b36d64df 1926{
1927 /* We cannot insert instructions on an abnormal critical edge.
1928 It will be easier to find the culprit if we die now. */
cc636d56 1929 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
b36d64df 1930
4ee9c684 1931 if (e->insns.r == NULL_RTX)
b36d64df 1932 start_sequence ();
1933 else
4ee9c684 1934 push_to_sequence (e->insns.r);
b36d64df 1935
1936 emit_insn (pattern);
1937
4ee9c684 1938 e->insns.r = get_insns ();
b36d64df 1939 end_sequence ();
1940}
1941
/* Update the CFG for the instructions queued on edge E.

   The queued sequence (E->insns.r, which is cleared here) is emitted at
   a place equivalent to executing it on E: inside E->dest when E is its
   only incoming edge, at the end of E->src when E is its only ordinary
   outgoing edge, and otherwise inside a block newly created by splitting
   E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	/* Nothing follows the label/basic-block note; append at the
	   very end of the insn stream.  */
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      /* The inserted return ends the block, so the old fallthru to
	 EXIT becomes the (implicit) return path; a barrier must follow
	 the return, and the jump the return replaced goes away.  */
      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    /* Otherwise the queued code must not have ended in any jump at
       all, or the CFG we just updated would be wrong.  */
    gcc_assert (!JUMP_P (last));
}
2050
2051/* Update the CFG for all queued instructions. */
2052
2053void
4c9e08a4 2054commit_edge_insertions (void)
b36d64df 2055{
b36d64df 2056 basic_block bb;
2057
80adc5a6 2058 /* Optimization passes that invoke this routine can cause hot blocks
2059 previously reached by both hot and cold blocks to become dominated only
2060 by cold blocks. This will cause the verification below to fail,
2061 and lead to now cold code in the hot section. In some cases this
2062 may only be visible after newly unreachable blocks are deleted,
2063 which will be done by fixup_partitions. */
2064 fixup_partitions ();
2065
b36d64df 2066#ifdef ENABLE_CHECKING
2067 verify_flow_info ();
2068#endif
2069
34154e27 2070 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2071 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
b36d64df 2072 {
cd665a06 2073 edge e;
2074 edge_iterator ei;
b36d64df 2075
cd665a06 2076 FOR_EACH_EDGE (e, ei, bb->succs)
2077 if (e->insns.r)
202bbc06 2078 commit_one_edge_insertion (e);
fb20d6fa 2079 }
b36d64df 2080}
2081\f
3072d30e 2082
5f5d4cd1 2083/* Print out RTL-specific basic block information (live information
5147ec07 2084 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2085 documented in dumpfile.h. */
b36d64df 2086
028f8cc7 2087static void
5147ec07 2088rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
b36d64df 2089{
2090 rtx insn;
2091 rtx last;
5f5d4cd1 2092 char *s_indent;
b36d64df 2093
f780cc25 2094 s_indent = (char *) alloca ((size_t) indent + 1);
4acc30e5 2095 memset (s_indent, ' ', (size_t) indent);
5f5d4cd1 2096 s_indent[indent] = '\0';
48e1416a 2097
5147ec07 2098 if (df && (flags & TDF_DETAILS))
3072d30e 2099 {
2100 df_dump_top (bb, outf);
2101 putc ('\n', outf);
2102 }
b36d64df 2103
4c2112a3 2104 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2105 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2106 insn = NEXT_INSN (insn))
5147ec07 2107 {
ea9538fb 2108 if (flags & TDF_DETAILS)
2109 df_dump_insn_top (insn, outf);
5147ec07 2110 if (! (flags & TDF_SLIM))
2111 print_rtl_single (outf, insn);
2112 else
2113 dump_insn_slim (outf, insn);
ea9538fb 2114 if (flags & TDF_DETAILS)
2115 df_dump_insn_bottom (insn, outf);
5147ec07 2116 }
2117
2118 if (df && (flags & TDF_DETAILS))
3072d30e 2119 {
2120 df_dump_bottom (bb, outf);
2121 putc ('\n', outf);
2122 }
2123
b36d64df 2124}
2125\f
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  With TDF_BLOCKS, block boundaries and per-insn block
   membership diagnostics are interleaved with the insn dump.  */

void
print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
{
  const_rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by INSN_UID: the block an insn heads, the block it
	 ends, and how many blocks claim it (for diagnostics below).  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      /* First pass: record, per INSN_UID, which block(s) contain it.  */
      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  /* First block that claims X leaves it IN_ONE_BB; a
		     second claim marks it IN_MULTIPLE_BB.  */
		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: walk the whole insn chain, printing block headers,
	 the insns themselves, and block footers as we cross them.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2223\f
4a020a8c 2224/* Update the branch probability of BB if a REG_BR_PROB is present. */
2225
f884e43f 2226void
4c9e08a4 2227update_br_prob_note (basic_block bb)
f884e43f 2228{
2229 rtx note;
6d7dc5b9 2230 if (!JUMP_P (BB_END (bb)))
f884e43f 2231 return;
5496dbfc 2232 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
9eb946de 2233 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
f884e43f 2234 return;
9eb946de 2235 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
f884e43f 2236}
5d65a39c 2237
2238/* Get the last insn associated with block BB (that includes barriers and
2239 tablejumps after BB). */
2240rtx
2241get_last_bb_insn (basic_block bb)
2242{
2243 rtx tmp;
2244 rtx end = BB_END (bb);
2245
2246 /* Include any jump table following the basic block. */
2247 if (tablejump_p (end, NULL, &tmp))
2248 end = tmp;
2249
2250 /* Include any barriers that may follow the basic block. */
c4d13c5c 2251 tmp = next_nonnote_insn_bb (end);
5d65a39c 2252 while (tmp && BARRIER_P (tmp))
2253 {
2254 end = tmp;
c4d13c5c 2255 tmp = next_nonnote_insn_bb (end);
5d65a39c 2256 }
2257
2258 return end;
2259}
812ca88e 2260
80adc5a6 2261/* Sanity check partition hotness to ensure that basic blocks in
2262   the cold partition don't dominate basic blocks in the hot partition.
2263 If FLAG_ONLY is true, report violations as errors. Otherwise
2264 re-mark the dominated blocks as cold, since this is run after
2265 cfg optimizations that may make hot blocks previously reached
2266 by both hot and cold blocks now only reachable along cold paths. */
2267
2268static vec<basic_block>
2269find_partition_fixes (bool flag_only)
2270{
2271 basic_block bb;
2272 vec<basic_block> bbs_in_cold_partition = vNULL;
2273 vec<basic_block> bbs_to_fix = vNULL;
2274
2275 /* Callers check this. */
2276 gcc_checking_assert (crtl->has_bb_partition);
2277
fc00614f 2278 FOR_EACH_BB_FN (bb, cfun)
80adc5a6 2279 if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
2280 bbs_in_cold_partition.safe_push (bb);
2281
2282 if (bbs_in_cold_partition.is_empty ())
2283 return vNULL;
2284
2285 bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2286
2287 if (dom_calculated_here)
2288 calculate_dominance_info (CDI_DOMINATORS);
2289
2290 while (! bbs_in_cold_partition.is_empty ())
2291 {
2292 bb = bbs_in_cold_partition.pop ();
2293 /* Any blocks dominated by a block in the cold section
2294 must also be cold. */
2295 basic_block son;
2296 for (son = first_dom_son (CDI_DOMINATORS, bb);
2297 son;
2298 son = next_dom_son (CDI_DOMINATORS, son))
2299 {
2300 /* If son is not yet cold, then mark it cold here and
2301 enqueue it for further processing. */
2302 if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2303 {
2304 if (flag_only)
2305 error ("non-cold basic block %d dominated "
2306 "by a block in the cold partition (%d)", son->index, bb->index);
2307 else
2308 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2309 bbs_to_fix.safe_push (son);
2310 bbs_in_cold_partition.safe_push (son);
2311 }
2312 }
2313 }
2314
2315 if (dom_calculated_here)
2316 free_dominance_info (CDI_DOMINATORS);
2317
2318 return bbs_to_fix;
2319}
2320
2321/* Perform cleanup on the hot/cold bb partitioning after optimization
2322 passes that modify the cfg. */
2323
2324void
2325fixup_partitions (void)
2326{
2327 basic_block bb;
2328
2329 if (!crtl->has_bb_partition)
2330 return;
2331
2332 /* Delete any blocks that became unreachable and weren't
2333 already cleaned up, for example during edge forwarding
2334 and convert_jumps_to_returns. This will expose more
2335 opportunities for fixing the partition boundaries here.
2336 Also, the calculation of the dominance graph during verification
2337 will assert if there are unreachable nodes. */
2338 delete_unreachable_blocks ();
2339
2340 /* If there are partitions, do a sanity check on them: A basic block in
2341   a cold partition cannot dominate a basic block in a hot partition.
2342 Fixup any that now violate this requirement, as a result of edge
2343 forwarding and unreachable block deletion.  */
2344 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2345
2346 /* Do the partition fixup after all necessary blocks have been converted to
2347 cold, so that we only update the region crossings the minimum number of
2348 places, which can require forcing edges to be non fallthru. */
2349 while (! bbs_to_fix.is_empty ())
2350 {
2351 bb = bbs_to_fix.pop ();
2352 fixup_new_cold_bb (bb);
2353 }
2354}
2355
812ca88e 2356/* Verify, in the basic block chain, that there is at most one switch
2357 between hot/cold partitions. This condition will not be true until
2358 after reorder_basic_blocks is called. */
2359
4bf32a77 2360static int
812ca88e 2361verify_hot_cold_block_grouping (void)
2362{
2363 basic_block bb;
2364 int err = 0;
2365 bool switched_sections = false;
2366 int current_partition = BB_UNPARTITIONED;
2367
aa78dca5 2368 /* Even after bb reordering is complete, we go into cfglayout mode
2369 again (in compgoto). Ensure we don't call this before going back
2370 into linearized RTL when any layout fixes would have been committed. */
2371 if (!crtl->bb_reorder_complete
9af5ce0c 2372 || current_ir_type () != IR_RTL_CFGRTL)
4bf32a77 2373 return err;
812ca88e 2374
fc00614f 2375 FOR_EACH_BB_FN (bb, cfun)
812ca88e 2376 {
2377 if (current_partition != BB_UNPARTITIONED
2378 && BB_PARTITION (bb) != current_partition)
2379 {
2380 if (switched_sections)
2381 {
2382 error ("multiple hot/cold transitions found (bb %i)",
2383 bb->index);
2384 err = 1;
2385 }
2386 else
2387 switched_sections = true;
2388
2389 if (!crtl->has_bb_partition)
2390 error ("partition found but function partition flag not set");
2391 }
2392 current_partition = BB_PARTITION (bb);
2393 }
2394
4bf32a77 2395 return err;
812ca88e 2396}
f884e43f 2397\f
b36d64df 2398
/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.  Returns nonzero on failure.  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Counters over BB's successor edges, classified by flags.  */
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      /* For a conditional jump with a REG_BR_PROB note, the note must
	 agree with the probability on the CFG branch edge (unless no
	 profile is available at all).  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
	      && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* An edge truly crosses sections when its endpoints (both
	     real blocks) lie in different partitions.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
	  has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (JUMP_P (BB_END (bb))
		  && !find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* An edge with none but the "decoration" flags below is an
	     ordinary branch edge.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* Conversely, a region-crossing jump note with no crossing edge
	 is also an inconsistency; dump the RTL to aid debugging.  */
      if (!has_crossing_edge
	  && find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
	{
	  print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
	  error ("Region crossing jump across same section in bb %i",
		 bb->index);
	  err = 1;
	}

      /* Now cross-check the per-block counters against the final insn.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      /* NOTE(review): this dereferences FALLTHRU; it appears a condjump
	 block is assumed to always have a fallthru edge — confirm.  */
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}
5cc577b6 2577
/* Checks on the instructions within blocks.  Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.
   Returns nonzero on failure; control-flow insns found mid-block are
   fatal (via fatal_insn).  */

static int
rtl_verify_bb_insns (void)
{
  rtx x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A block consisting only of its label cannot carry the
	     required basic block note.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      /* X is now the insn after the optional label: it must be this
	 block's own NOTE_INSN_BASIC_BLOCK.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body: no further bb notes, and no control flow
	   before the final insn.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    /* The last insn may legitimately be a control flow insn,
	       so stop before checking it for that.  */
	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
2641
2642/* Verify that block pointers for instructions in basic blocks, headers and
2643 footers are set appropriately. */
2644
2645static int
2646rtl_verify_bb_pointers (void)
2647{
2648 int err = 0;
2649 basic_block bb;
2650
2651 /* Check the general integrity of the basic blocks. */
7a46197b 2652 FOR_EACH_BB_REVERSE_FN (bb, cfun)
4bf32a77 2653 {
2654 rtx insn;
2655
2656 if (!(bb->flags & BB_RTL))
2657 {
2658 error ("BB_RTL flag not set for block %d", bb->index);
2659 err = 1;
2660 }
2661
2662 FOR_BB_INSNS (bb, insn)
2663 if (BLOCK_FOR_INSN (insn) != bb)
2664 {
2665 error ("insn %d basic block pointer is %d, should be %d",
2666 INSN_UID (insn),
2667 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2668 bb->index);
2669 err = 1;
2670 }
2671
2672 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2673 if (!BARRIER_P (insn)
2674 && BLOCK_FOR_INSN (insn) != NULL)
2675 {
2676 error ("insn %d in header of bb %d has non-NULL basic block",
2677 INSN_UID (insn), bb->index);
2678 err = 1;
2679 }
2680 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2681 if (!BARRIER_P (insn)
2682 && BLOCK_FOR_INSN (insn) != NULL)
2683 {
2684 error ("insn %d in footer of bb %d has non-NULL basic block",
2685 INSN_UID (insn), bb->index);
2686 err = 1;
2687 }
2688 }
812ca88e 2689
028f8cc7 2690 /* Clean up. */
028f8cc7 2691 return err;
2692}
5cc577b6 2693
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  /* Run the three sub-verifiers in order; any failure makes the
     combined result nonzero.  */
  int failed = rtl_verify_bb_pointers ();

  failed |= rtl_verify_bb_insns ();
  failed |= rtl_verify_edges ();

  return failed;
}
2725
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.

   Blocks are visited in reverse layout order while the chain is walked
   backwards from the last insn, so each block's insns are matched
   against a distinct segment of the chain.  Returns nonzero on
   failure.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  rtx last_head = get_last_insn ();
  /* Maps INSN_UID to the (first) block claiming that insn, to detect
     insns claimed by more than one block.  */
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Walk backwards from where the previous (layout-later) block's
	 segment ended, looking for this block's BB_END.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* The next (layout-earlier) block's segment ends just before
	 this block's head.  NOTE(review): if X is NULL here (head not
	 found), PREV_INSN (x) looks like a NULL dereference — confirm
	 whether that path can be reached without aborting first.  */
      last_head = PREV_INSN (x);
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2814
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  Returns nonzero on
   failure; a non-barrier insn found inside a fallthru gap is fatal
   (via fatal_insn).  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Hitting the end of the stream or the next block's note
		 before a barrier means the barrier is missing.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx insn;

	  /* A fallthru edge between real blocks must connect layout
	     neighbors...  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* ... and nothing executable (or a barrier) may sit in the
	       gap between them.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
2873
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  Layout
   violations are fatal (internal_error/fatal_insn); ERR is kept for
   interface symmetry with the other verifiers.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  /* Each bb note must open the block that follows the one we
	     saw last, i.e. blocks appear in chain order.  */
	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      /* CURR_BB is NULL between blocks; only a restricted set of
	 insns may live there.  */
      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* An unconditional return must be immediately followed by a
	 barrier (notes aside).  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      /* Leaving the current block when its BB_END goes by.  */
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  /* Every real block (fixed entry/exit excluded) must have produced
     exactly one bb note.  */
  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

  return err;
}
4bf32a77 2942
2943/* Verify the CFG and RTL consistency common for both underlying RTL and
2944 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2945
2946 Currently it does following checks:
2947 - all checks of rtl_verify_flow_info_1
2948 - test head/end pointers
2949 - check that blocks are laid out in consecutive order
2950 - check that all insns are in the basic blocks
2951 (except the switch handling code, barriers and notes)
2952 - check that all returns are followed by barriers
80adc5a6 2953 - check that all fallthru edge points to the adjacent blocks
2954 - verify that there is a single hot/cold partition boundary after bbro */
4bf32a77 2955
2956static int
2957rtl_verify_flow_info (void)
2958{
2959 int err = 0;
2960
2961 err |= rtl_verify_flow_info_1 ();
2962
2963 err |= rtl_verify_bb_insn_chain ();
2964
2965 err |= rtl_verify_fallthru ();
2966
2967 err |= rtl_verify_bb_layout ();
2968
aa78dca5 2969 err |= verify_hot_cold_block_grouping ();
2970
4bf32a77 2971 return err;
2972}
b36d64df 2973\f
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* If the block ends in debug insns/notes, analyze the last real insn
     instead.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;  /* Shadows the outer NOTE; local to the jump handling.  */
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid letting abnormal flags leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = XINT (note, 0);
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  /* FIXME: Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
3194
5cc577b6 3195/* Search all basic blocks for potentially dead edges and purge them. Return
3196 true if some edge has been eliminated. */
b36d64df 3197
3198bool
94ee50e8 3199purge_all_dead_edges (void)
b36d64df 3200{
4c26117a 3201 int purged = false;
4c26117a 3202 basic_block bb;
0922c912 3203
fc00614f 3204 FOR_EACH_BB_FN (bb, cfun)
0922c912 3205 {
4c26117a 3206 bool purged_here = purge_dead_edges (bb);
5cc577b6 3207
0922c912 3208 purged |= purged_here;
0922c912 3209 }
5cc577b6 3210
b36d64df 3211 return purged;
3212}
1026363d 3213
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the abnormal/EH edge does not originate from the
	 block's last insn, i.e. insns were emitted after the real one.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/trapping insn and move
		 everything after it onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
23a070f3 3302\f
3303/* Cut the insns from FIRST to LAST out of the insns stream. */
3304
3305rtx
3306unlink_insn_chain (rtx first, rtx last)
3307{
3308 rtx prevfirst = PREV_INSN (first);
3309 rtx nextlast = NEXT_INSN (last);
3310
3311 PREV_INSN (first) = NULL;
3312 NEXT_INSN (last) = NULL;
3313 if (prevfirst)
3314 NEXT_INSN (prevfirst) = nextlast;
3315 if (nextlast)
3316 PREV_INSN (nextlast) = prevfirst;
3317 else
3318 set_last_insn (prevfirst);
3319 if (!prevfirst)
3320 set_first_insn (nextlast);
3321 return first;
3322}
3323\f
3324/* Skip over inter-block insns occurring after BB which are typically
3325 associated with BB (e.g., barriers). If there are any such insns,
3326 we return the last one. Otherwise, we return the end of BB. */
3327
3328static rtx
3329skip_insns_after_block (basic_block bb)
3330{
3331 rtx insn, last_insn, next_head, prev;
3332
3333 next_head = NULL_RTX;
34154e27 3334 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
23a070f3 3335 next_head = BB_HEAD (bb->next_bb);
3336
3337 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3338 {
3339 if (insn == next_head)
3340 break;
3341
3342 switch (GET_CODE (insn))
3343 {
3344 case BARRIER:
3345 last_insn = insn;
3346 continue;
3347
3348 case NOTE:
3349 switch (NOTE_KIND (insn))
3350 {
3351 case NOTE_INSN_BLOCK_END:
3352 gcc_unreachable ();
3353 continue;
3354 default:
3355 continue;
3356 break;
3357 }
3358 break;
3359
3360 case CODE_LABEL:
3361 if (NEXT_INSN (insn)
3362 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3363 {
3364 insn = NEXT_INSN (insn);
3365 last_insn = insn;
3366 continue;
3367 }
3368 break;
3369
3370 default:
3371 break;
3372 }
3373
3374 break;
3375 }
3376
3377 /* It is possible to hit contradictory sequence. For instance:
3378
3379 jump_insn
3380 NOTE_INSN_BLOCK_BEG
3381 barrier
3382
3383 Where barrier belongs to jump_insn, but the note does not. This can be
3384 created by removing the basic block originally following
3385 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3386
3387 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3388 {
3389 prev = PREV_INSN (insn);
3390 if (NOTE_P (insn))
3391 switch (NOTE_KIND (insn))
3392 {
3393 case NOTE_INSN_BLOCK_END:
3394 gcc_unreachable ();
3395 break;
3396 case NOTE_INSN_DELETED:
3397 case NOTE_INSN_DELETED_LABEL:
3398 case NOTE_INSN_DELETED_DEBUG_LABEL:
3399 continue;
3400 default:
3401 reorder_insns (insn, insn, last_insn);
3402 }
3403 }
3404
3405 return last_insn;
3406}
3407
3408/* Locate or create a label for a given basic block. */
3409
3410static rtx
3411label_for_bb (basic_block bb)
3412{
3413 rtx label = BB_HEAD (bb);
3414
3415 if (!LABEL_P (label))
3416 {
3417 if (dump_file)
3418 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3419
3420 label = block_label (bb);
3421 }
3422
3423 return label;
3424}
3425
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Insns that live
   between blocks are unlinked from the main chain and stashed in the
   per-block header/footer fields (and in cfg_layout_function_header/
   cfg_layout_function_footer for the function's extremities).  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip the leading notes up to the first basic-block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  /* NEXT_INSN tracks the first insn not yet assigned to a block.  */
  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx end;

      /* Insns between the previous block and this one become BB's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers/jump tables become BB's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever remains after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3469\f
/* Pass execute hook: switch the current function into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
3476
3477static unsigned int
3478outof_cfg_layout_mode (void)
3479{
3480 basic_block bb;
3481
fc00614f 3482 FOR_EACH_BB_FN (bb, cfun)
34154e27 3483 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
23a070f3 3484 bb->aux = bb->next_bb;
3485
3486 cfg_layout_finalize ();
3487
3488 return 0;
3489}
3490
namespace {

/* Pass descriptor for the "into_cfglayout" RTL pass; see
   into_cfg_layout_mode above for the work it performs.  */
const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that dispatches to into_cfg_layout_mode.  */
class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return into_cfg_layout_mode (); }

}; // class pass_into_cfg_layout_mode

} // anon namespace
3521
/* Factory function for the into_cfglayout pass.  */

rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3527
namespace {

/* Pass descriptor for the "outof_cfglayout" RTL pass; see
   outof_cfg_layout_mode above for the work it performs.  */
const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that dispatches to outof_cfg_layout_mode.  */
class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return outof_cfg_layout_mode (); }

}; // class pass_outof_cfg_layout_mode

} // anon namespace
3558
/* Factory function for the outof_cfglayout pass.  */

rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
23a070f3 3564\f
3565
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb to follow the
     order recorded in the AUX chain.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3639\f
3640
/* Given a reorder chain (the desired block order threaded through the
   bb->aux fields), rearrange the code to match: first splice the insn
   stream into the new order, then patch jumps/labels so control flow
   matches the new layout, and finally make sure edge goto_locus data
   is represented by real insns when not optimizing.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* INSN tracks the last insn emitted into the reordered stream.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
       bb->aux)
    {
      /* Re-attach the stashed header, the block body, and the stashed
	 footer, in that order.  */
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
       bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && XINT (note, 0) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest
				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      /* Swap the roles of the two edges.  */
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest
				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  /* Splice the new forwarder block into the AUX chain right
	     after BB.  */
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB_FN (bb, cfun)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* Look backward in the source block for a non-debug insn
		 already carrying the edge's locus.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      /* A location-less simplejump can simply adopt the locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Or the destination block may already start with an
		     insn carrying the locus.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and give the new block (at
		 least a nop) with the desired locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
3936\f
3937/* Perform sanity checks on the insn chain.
3938 1. Check that next/prev pointers are consistent in both the forward and
3939 reverse direction.
3940 2. Count insns in chain, going both directions, and check if equal.
3941 3. Check that get_last_insn () returns the actual end of chain. */
3942
3943DEBUG_FUNCTION void
3944verify_insn_chain (void)
3945{
3946 rtx x, prevx, nextx;
3947 int insn_cnt1, insn_cnt2;
3948
3949 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3950 x != 0;
3951 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3952 gcc_assert (PREV_INSN (x) == prevx);
3953
3954 gcc_assert (prevx == get_last_insn ());
3955
3956 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3957 x != 0;
3958 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3959 gcc_assert (NEXT_INSN (x) == nextx);
3960
3961 gcc_assert (insn_cnt1 == insn_cnt2);
3962}
3963\f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* Find the fallthru predecessor of the exit block, if any.  */
  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* The bb->aux pointers chain the blocks in their intended final order;
     a non-null aux means BB is not currently last and must be moved.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...advance C to the current last block...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ...and re-append BB so it becomes the final block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4008
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* See whether the exit block has two or more fallthru predecessors,
     remembering the first one found in PREDECESSOR.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  /* Redirect every other fallthru edge into the forwarder.  A safe
     iterator is required because redirection rewrites the exit block's
     predecessor list as we scan it.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  Hook it onto the first block whose aux link is free.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
4059\f
4060/* Return true in case it is possible to duplicate the basic block BB. */
4061
4062static bool
4063cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4064{
4065 /* Do not attempt to duplicate tablejumps, as we need to unshare
4066 the dispatch table. This is difficult to do, as the instructions
4067 computing jump destination may be hoisted outside the basic block. */
4068 if (tablejump_p (BB_END (bb), NULL, NULL))
4069 return false;
4070
4071 /* Do not duplicate blocks containing insns that can't be copied. */
4072 if (targetm.cannot_copy_insn_p)
4073 {
4074 rtx insn = BB_HEAD (bb);
4075 while (1)
4076 {
4077 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4078 return false;
4079 if (insn == BB_END (bb))
4080 break;
4081 insn = NEXT_INSN (insn);
4082 }
4083 }
4084
4085 return true;
4086}
4087
/* Make a copy of the insn chain FROM ... TO, inclusive, emitting it at the
   end of the current insn stream, and return the first insn of the copy.
   The caller is expected to unlink and re-link the copy into place.  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, next, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* (Simple) return patterns in JUMP_LABEL are not real labels;
	     carry them over to the copy by hand.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  /* Labels are not copied; the copy gets fresh ones as needed.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Drop the scratch NOTE_INSN_DELETED emitted above and hand back the
     first insn of the actual copy.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
4175
/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  /* Copy BB's insn chain and wrap a fresh block around the copy, placed
     just before the exit block.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  /* Also duplicate the header insns kept before the block while in
     cfglayout mode, if any.  */
  if (BB_HEADER (bb))
    {
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Likewise for the footer insns kept after the block.  */
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4212
4213\f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  /* Once bb reordering is complete, cfg layout mode should not be re-entered.
     Entering cfg layout mode will perform optimizations on the cfg that
     could affect the bb layout negatively or even require fixups.  An
     example of the latter is if edge forwarding performed when optimizing
     the cfg layout required moving a block from the hot to the cold section
     under -freorder-blocks-and-partition.  This would create an illegal
     partitioning unless some manual fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete);

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  /* Stash each block's surrounding header/footer insns away so the insn
     chain can be reconstructed when leaving cfglayout mode.  */
  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4249
4250/* Splits superblocks. */
4251void
4252break_superblocks (void)
4253{
4254 sbitmap superblocks;
4255 bool need = false;
4256 basic_block bb;
4257
fe672ac0 4258 superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
53c5d9d4 4259 bitmap_clear (superblocks);
23a070f3 4260
fc00614f 4261 FOR_EACH_BB_FN (bb, cfun)
23a070f3 4262 if (bb->flags & BB_SUPERBLOCK)
4263 {
4264 bb->flags &= ~BB_SUPERBLOCK;
08b7917c 4265 bitmap_set_bit (superblocks, bb->index);
23a070f3 4266 need = true;
4267 }
4268
4269 if (need)
4270 {
4271 rebuild_jump_labels (get_insns ());
4272 find_many_sub_basic_blocks (superblocks);
4273 }
4274
4275 free (superblocks);
4276}
4277
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Make sure the exit block has a single fallthru predecessor.  */
  force_one_exit_fallthru ();
  /* Leave cfglayout mode: from here on the insn chain is authoritative.  */
  rtl_register_cfg_hooks ();
  /* When the target has no epilogue pattern, the fallthru predecessor of
     the exit block must be placed last in the chain.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Relink the insn stream to follow the block order recorded in the
     aux pointers.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
4305
54f21e20 4306
1026363d 4307/* Same as split_block but update cfg_layout structures. */
5f5d4cd1 4308
4309static basic_block
4c9e08a4 4310cfg_layout_split_block (basic_block bb, void *insnp)
1026363d 4311{
f780cc25 4312 rtx insn = (rtx) insnp;
5f5d4cd1 4313 basic_block new_bb = rtl_split_block (bb, insn);
1026363d 4314
43e94e51 4315 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4316 BB_FOOTER (bb) = NULL;
1026363d 4317
5f5d4cd1 4318 return new_bb;
1026363d 4319}
4320
/* Redirect edge E to DEST, the cfglayout-mode variant.  Return the
   redirected edge on success, NULL if the redirection cannot be done.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal-call and EH edges cannot be retargeted by editing jumps.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already going there - nothing to do.  */
  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* A simple fallthru edge out of the entry block needs no insn changes;
     just repoint the successor.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
4403
4404/* Simple wrapper as we always can redirect fallthru edges. */
4405static basic_block
4c9e08a4 4406cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
1026363d 4407{
cc636d56 4408 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4409
4410 gcc_assert (redirected);
1026363d 4411 return NULL;
4412}
4413
/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  /* Splice the stashed header insns back into the insn chain just before
     the block's first insn.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* Drop barriers from the footer, scanning only up to the first
	 label (if any); whatever survives is spliced in after the
	 block's last insn.  */
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Notes surviving the deletion are handed to the next block's header,
     or to the function footer when BB was the last real block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If insns between PREV and NEXT remain (leftover notes), unlink them
     and prepend them to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4496
139c3f48 4497/* Return true when blocks A and B can be safely merged. */
47aaf6e6 4498
c60fa3a7 4499static bool
47aaf6e6 4500cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
c60fa3a7 4501{
4f18499c 4502 /* If we are partitioning hot/cold basic blocks, we don't want to
4503 mess up unconditional or indirect jumps that cross between hot
7562ed74 4504 and cold sections.
4505
1118aef7 4506 Basic block partitioning may result in some jumps that appear to
a0c938f0 4507 be optimizable (or blocks that appear to be mergeable), but which really
4508 must be left untouched (they are required to make it safely across
4509 partition boundaries). See the comments at the top of
1118aef7 4510 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4511
1897b881 4512 if (BB_PARTITION (a) != BB_PARTITION (b))
7562ed74 4513 return false;
4f18499c 4514
79f958cb 4515 /* Protect the loop latches. */
4516 if (current_loops && b->loop_father->latch == b)
4517 return false;
4518
d945554e 4519 /* If we would end up moving B's instructions, make sure it doesn't fall
4520 through into the exit block, since we cannot recover from a fallthrough
4521 edge into the exit block occurring in the middle of a function. */
4522 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4523 {
4524 edge e = find_fallthru_edge (b->succs);
34154e27 4525 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
d945554e 4526 return false;
4527 }
4528
c60fa3a7 4529 /* There must be exactly one edge in between the blocks. */
ea091dfd 4530 return (single_succ_p (a)
4531 && single_succ (a) == b
4532 && single_pred_p (b) == 1
cd665a06 4533 && a != b
c60fa3a7 4534 /* Must be simple edge. */
ea091dfd 4535 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
34154e27 4536 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4537 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4aaec180 4538 /* If the jump insn has side effects, we can't kill the edge.
4539 When not optimizing, try_redirect_by_replacing_jump will
4540 not allow us to redirect an edge by replacing a table jump. */
6d7dc5b9 4541 && (!JUMP_P (BB_END (a))
4aaec180 4542 || ((!optimize || reload_completed)
5496dbfc 4543 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
c60fa3a7 4544}
4545
/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx last = BB_FOOTER (a);

	  /* Append B's footer chain after the end of A's.  */
	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = BB_FOOTER (b);
	  PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  rtx last = BB_HEADER (b);

	  /* Chain B's header in front of A's footer.  */
	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = BB_FOOTER (a);
	  PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4649
4650/* Split edge E. */
5f5d4cd1 4651
c60fa3a7 4652static basic_block
4653cfg_layout_split_edge (edge e)
4654{
c60fa3a7 4655 basic_block new_bb =
34154e27 4656 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
5496dbfc 4657 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
c60fa3a7 4658 NULL_RTX, e->src);
4659
34154e27 4660 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
e1b2b77c 4661 BB_COPY_PARTITION (new_bb, e->src);
4662 else
4663 BB_COPY_PARTITION (new_bb, e->dest);
3bdc4312 4664 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
c60fa3a7 4665 redirect_edge_and_branch_force (e, new_bb);
4666
4667 return new_bb;
4668}
4669
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
  /* Nothing to do for plain RTL; the hook exists for IRs that need
     extra bookkeeping after forwarder creation.  */
}
4676
9631926a 4677/* Return true if BB contains only labels or non-executable
4678 instructions. */
4679
4680static bool
4681rtl_block_empty_p (basic_block bb)
4682{
4683 rtx insn;
4684
34154e27 4685 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4686 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
9631926a 4687 return true;
4688
4689 FOR_BB_INSNS (bb, insn)
4690 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4691 return false;
4692
4693 return true;
4694}
4695
4696/* Split a basic block if it ends with a conditional branch and if
4697 the other part of the block is not empty. */
4698
4699static basic_block
4700rtl_split_block_before_cond_jump (basic_block bb)
4701{
4702 rtx insn;
4703 rtx split_point = NULL;
4704 rtx last = NULL;
4705 bool found_code = false;
4706
4707 FOR_BB_INSNS (bb, insn)
4708 {
4709 if (any_condjump_p (insn))
4710 split_point = last;
4711 else if (NONDEBUG_INSN_P (insn))
4712 found_code = true;
4713 last = insn;
4714 }
4715
4716 /* Did not find everything. */
4717 if (found_code && split_point)
4718 return split_block (bb, split_point)->dest;
4719 else
4720 return NULL;
4721}
4722
4ee9c684 4723/* Return 1 if BB ends with a call, possibly followed by some
4724 instructions that must stay with the call, 0 otherwise. */
4725
4726static bool
47aaf6e6 4727rtl_block_ends_with_call_p (basic_block bb)
4ee9c684 4728{
4729 rtx insn = BB_END (bb);
4730
6d7dc5b9 4731 while (!CALL_P (insn)
4ee9c684 4732 && insn != BB_HEAD (bb)
f69dfa18 4733 && (keep_with_call_p (insn)
9845d120 4734 || NOTE_P (insn)
4735 || DEBUG_INSN_P (insn)))
4ee9c684 4736 insn = PREV_INSN (insn);
6d7dc5b9 4737 return (CALL_P (insn));
4ee9c684 4738}
4739
/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  /* Delegate to the generic condjump test on the block's last insn.  */
  return any_condjump_p (BB_END (bb));
}
4747
4748/* Return true if we need to add fake edge to exit.
4749 Helper function for rtl_flow_call_edges_add. */
4750
4751static bool
5493cb9a 4752need_fake_edge_p (const_rtx insn)
4ee9c684 4753{
4754 if (!INSN_P (insn))
4755 return false;
4756
6d7dc5b9 4757 if ((CALL_P (insn)
4ee9c684 4758 && !SIBLING_CALL_P (insn)
4759 && !find_reg_note (insn, REG_NORETURN, NULL)
9c2a0c05 4760 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4ee9c684 4761 return true;
4762
4763 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4764 && MEM_VOLATILE_P (PATTERN (insn)))
4765 || (GET_CODE (PATTERN (insn)) == PARALLEL
4766 && asm_noperands (insn) != -1
4767 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4768 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4769}
4770
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do when the function has only the fixed entry/exit
     blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* Append a harmless USE insn so the call is no longer the
		 last insn before the fallthru edge to exit.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx insn;
      rtx prev_insn;

      /* Block indices may be sparse after removals.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block backwards, splitting after each insn that needs
	 a fake edge.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4897
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target and SECOND_HEAD the fall-thru block.
   There is no need to adjust the fall-thru here; the loop versioning
   code handles that.  SECOND_HEAD is accepted (though unused) so this
   hook's signature matches the tree-level variant.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;


  label = block_label (first_head);
  /* Pick the comparison mode from whichever operand carries one.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump sequence off to the side.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new condition at the end of the conditional block.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
4934
4935
4936/* Given a block B with unconditional branch at its end, get the
4937 store the return the branch edge and the fall-thru edge in
4938 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4939static void
4940rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4941 edge *fallthru_edge)
4942{
4943 edge e = EDGE_SUCC (b, 0);
4944
4945 if (e->flags & EDGE_FALLTHRU)
4946 {
4947 *fallthru_edge = e;
4948 *branch_edge = EDGE_SUCC (b, 1);
4949 }
4950 else
4951 {
4952 *branch_edge = e;
4953 *fallthru_edge = EDGE_SUCC (b, 1);
4954 }
4955}
4956
/* Allocate and attach the RTL-specific info structure for basic block BB.
   BB must not already have one.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  /* GC-allocated and zero-initialized.  */
  bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
}
4964
611d2ac1 4965/* Returns true if it is possible to remove edge E by redirecting
4966 it to the destination of the other edge from E->src. */
4967
4968static bool
5493cb9a 4969rtl_can_remove_branch_p (const_edge e)
611d2ac1 4970{
5493cb9a 4971 const_basic_block src = e->src;
4972 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4973 const_rtx insn = BB_END (src), set;
611d2ac1 4974
4975 /* The conditions are taken from try_redirect_by_replacing_jump. */
34154e27 4976 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
611d2ac1 4977 return false;
4978
4979 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4980 return false;
4981
aa78dca5 4982 if (BB_PARTITION (src) != BB_PARTITION (target))
611d2ac1 4983 return false;
4984
4985 if (!onlyjump_p (insn)
4986 || tablejump_p (insn, NULL, NULL))
4987 return false;
4988
4989 set = single_set (insn);
4990 if (!set || side_effects_p (set))
4991 return false;
4992
4993 return true;
4994}
4995
0a55d497 4996static basic_block
4997rtl_duplicate_bb (basic_block bb)
4998{
4999 bb = cfg_layout_duplicate_bb (bb);
5000 bb->aux = NULL;
5001 return bb;
5002}
5003
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counts in RECORD.  */
5007static void
5008rtl_account_profile_record (basic_block bb, int after_pass,
5009 struct profile_record *record)
5010{
5011 rtx insn;
5012 FOR_BB_INSNS (bb, insn)
5013 if (INSN_P (insn))
5014 {
5015 record->size[after_pass]
5016 += insn_rtx_cost (PATTERN (insn), false);
f26d8580 5017 if (profile_status_for_fn (cfun) == PROFILE_READ)
98193482 5018 record->time[after_pass]
5019 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
f26d8580 5020 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
98193482 5021 record->time[after_pass]
5022 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5023 }
5024}
5025
/* Implementation of CFG manipulation for linearized RTL.
   Positional initializer: each entry is annotated with the cfg_hooks
   field it fills (see cfghooks.h).  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks,			/* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL,					/* execute_on_growing_pred */
  NULL,					/* execute_on_shrinking_pred */
  NULL,					/* duplicate loop for trees */
  NULL,					/* lv_add_condition_to_bb */
  NULL,					/* lv_adjust_loop_header_phi */
  NULL,					/* extract_cond_bb_edges */
  NULL,					/* flush_pending_stmts */
  rtl_block_empty_p,			/* block_empty_p */
  rtl_split_block_before_cond_jump,	/* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
5063
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   Positional initializer: each entry is annotated with the cfg_hooks
   field it fills (see cfghooks.h).  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL,					/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL,					/* execute_on_growing_pred */
  NULL,					/* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge,	/* duplicate loop for trees */
  rtl_lv_add_condition_to_bb,		/* lv_add_condition_to_bb */
  NULL,					/* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges,		/* extract_cond_bb_edges */
  NULL,					/* flush_pending_stmts */
  rtl_block_empty_p,			/* block_empty_p */
  rtl_split_block_before_cond_jump,	/* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
23a070f3 5105
5106#include "gt-cfgrtl.h"