]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cfgrtl.c
RISC-V: Add testcase for pr114734
[thirdparty/gcc.git] / gcc / cfgrtl.c
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
22
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
33
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
39 \f
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "backend.h"
44 #include "target.h"
45 #include "rtl.h"
46 #include "tree.h"
47 #include "cfghooks.h"
48 #include "df.h"
49 #include "insn-config.h"
50 #include "memmodel.h"
51 #include "emit-rtl.h"
52 #include "cfgrtl.h"
53 #include "cfganal.h"
54 #include "cfgbuild.h"
55 #include "cfgcleanup.h"
56 #include "bb-reorder.h"
57 #include "rtl-error.h"
58 #include "insn-attr.h"
59 #include "dojump.h"
60 #include "expr.h"
61 #include "cfgloop.h"
62 #include "tree-pass.h"
63 #include "print-rtl.h"
64
65 /* Disable warnings about missing quoting in GCC diagnostics. */
66 #if __GNUC__ >= 10
67 # pragma GCC diagnostic push
68 # pragma GCC diagnostic ignored "-Wformat-diag"
69 #endif
70
71 /* Holds the interesting leading and trailing notes for the function.
72 Only applicable if the CFG is in cfglayout mode. */
73 static GTY(()) rtx_insn *cfg_layout_function_footer;
74 static GTY(()) rtx_insn *cfg_layout_function_header;
75
76 static rtx_insn *skip_insns_after_block (basic_block);
77 static void record_effective_endpoints (void);
78 static void fixup_reorder_chain (void);
79
80 void verify_insn_chain (void);
81 static void fixup_fallthru_exit_predecessor (void);
82 static int can_delete_note_p (const rtx_note *);
83 static int can_delete_label_p (const rtx_code_label *);
84 static basic_block rtl_split_edge (edge);
85 static bool rtl_move_block_after (basic_block, basic_block);
86 static int rtl_verify_flow_info (void);
87 static basic_block cfg_layout_split_block (basic_block, void *);
88 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
89 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
90 static void cfg_layout_delete_block (basic_block);
91 static void rtl_delete_block (basic_block);
92 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
93 static edge rtl_redirect_edge_and_branch (edge, basic_block);
94 static basic_block rtl_split_block (basic_block, void *);
95 static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
96 static int rtl_verify_flow_info_1 (void);
97 static void rtl_make_forwarder_block (edge);
98 \f
99 /* Return true if NOTE is not one of the ones that must be kept paired,
100 so that we may simply delete it. */
101
102 static int
103 can_delete_note_p (const rtx_note *note)
104 {
105 switch (NOTE_KIND (note))
106 {
107 case NOTE_INSN_DELETED:
108 case NOTE_INSN_BASIC_BLOCK:
109 case NOTE_INSN_EPILOGUE_BEG:
110 return true;
111
112 default:
113 return false;
114 }
115 }
116
117 /* True if a given label can be deleted. */
118
119 static int
120 can_delete_label_p (const rtx_code_label *label)
121 {
122 return (!LABEL_PRESERVE_P (label)
123 /* User declared labels must be preserved. */
124 && LABEL_NAME (label) == 0
125 && !vec_safe_contains<rtx_insn *> (forced_labels,
126 const_cast<rtx_code_label *> (label)));
127 }
128
129 /* Delete INSN by patching it out. */
130
131 void
132 delete_insn (rtx_insn *insn)
133 {
134 rtx note;
135 bool really_delete = true;
136
137 if (LABEL_P (insn))
138 {
139 /* Some labels can't be directly removed from the INSN chain, as they
140 might be references via variables, constant pool etc.
141 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
142 if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
143 {
144 const char *name = LABEL_NAME (insn);
145 basic_block bb = BLOCK_FOR_INSN (insn);
146 rtx_insn *bb_note = NEXT_INSN (insn);
147
148 really_delete = false;
149 PUT_CODE (insn, NOTE);
150 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
151 NOTE_DELETED_LABEL_NAME (insn) = name;
152
153 /* If the note following the label starts a basic block, and the
154 label is a member of the same basic block, interchange the two. */
155 if (bb_note != NULL_RTX
156 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
157 && bb != NULL
158 && bb == BLOCK_FOR_INSN (bb_note))
159 {
160 reorder_insns_nobb (insn, insn, bb_note);
161 BB_HEAD (bb) = bb_note;
162 if (BB_END (bb) == bb_note)
163 BB_END (bb) = insn;
164 }
165 }
166
167 remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
168 }
169
170 if (really_delete)
171 {
172 /* If this insn has already been deleted, something is very wrong. */
173 gcc_assert (!insn->deleted ());
174 if (INSN_P (insn))
175 df_insn_delete (insn);
176 remove_insn (insn);
177 insn->set_deleted ();
178 }
179
180 /* If deleting a jump, decrement the use count of the label. Deleting
181 the label itself should happen in the normal course of block merging. */
182 if (JUMP_P (insn))
183 {
184 if (JUMP_LABEL (insn)
185 && LABEL_P (JUMP_LABEL (insn)))
186 LABEL_NUSES (JUMP_LABEL (insn))--;
187
188 /* If there are more targets, remove them too. */
189 while ((note
190 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
191 && LABEL_P (XEXP (note, 0)))
192 {
193 LABEL_NUSES (XEXP (note, 0))--;
194 remove_note (insn, note);
195 }
196 }
197
198 /* Also if deleting any insn that references a label as an operand. */
199 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
200 && LABEL_P (XEXP (note, 0)))
201 {
202 LABEL_NUSES (XEXP (note, 0))--;
203 remove_note (insn, note);
204 }
205
206 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
207 {
208 rtvec vec = table->get_labels ();
209 int len = GET_NUM_ELEM (vec);
210 int i;
211
212 for (i = 0; i < len; i++)
213 {
214 rtx label = XEXP (RTVEC_ELT (vec, i), 0);
215
216 /* When deleting code in bulk (e.g. removing many unreachable
217 blocks) we can delete a label that's a target of the vector
218 before deleting the vector itself. */
219 if (!NOTE_P (label))
220 LABEL_NUSES (label)--;
221 }
222 }
223 }
224
225 /* Like delete_insn but also purge dead edges from BB.
226 Return true if any edges are eliminated. */
227
228 bool
229 delete_insn_and_edges (rtx_insn *insn)
230 {
231 bool purge = false;
232
233 if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
234 {
235 basic_block bb = BLOCK_FOR_INSN (insn);
236 if (BB_END (bb) == insn)
237 purge = true;
238 else if (DEBUG_INSN_P (BB_END (bb)))
239 for (rtx_insn *dinsn = NEXT_INSN (insn);
240 DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
241 if (BB_END (bb) == dinsn)
242 {
243 purge = true;
244 break;
245 }
246 }
247 delete_insn (insn);
248 if (purge)
249 return purge_dead_edges (BLOCK_FOR_INSN (insn));
250 return false;
251 }
252
253 /* Unlink a chain of insns between START and FINISH, leaving notes
254 that must be paired. If CLEAR_BB is true, we set bb field for
255 insns that cannot be removed to NULL. */
256
257 void
258 delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
259 {
260 /* Unchain the insns one by one. It would be quicker to delete all of these
261 with a single unchaining, rather than one at a time, but we need to keep
262 the NOTE's. */
263 rtx_insn *current = finish;
264 while (1)
265 {
266 rtx_insn *prev = PREV_INSN (current);
267 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
268 ;
269 else
270 delete_insn (current);
271
272 if (clear_bb && !current->deleted ())
273 set_block_for_insn (current, NULL);
274
275 if (current == start)
276 break;
277 current = prev;
278 }
279 }
280 \f
281 /* Create a new basic block consisting of the instructions between HEAD and END
282 inclusive. This function is designed to allow fast BB construction - reuses
283 the note and basic block struct in BB_NOTE, if any and do not grow
284 BASIC_BLOCK chain and should be used directly only by CFG construction code.
285 END can be NULL in to create new empty basic block before HEAD. Both END
286 and HEAD can be NULL to create basic block at the end of INSN chain.
287 AFTER is the basic block we should be put after. */
288
289 basic_block
290 create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
291 basic_block after)
292 {
293 basic_block bb;
294
295 if (bb_note
296 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
297 && bb->aux == NULL)
298 {
299 /* If we found an existing note, thread it back onto the chain. */
300
301 rtx_insn *after;
302
303 if (LABEL_P (head))
304 after = head;
305 else
306 {
307 after = PREV_INSN (head);
308 head = bb_note;
309 }
310
311 if (after != bb_note && NEXT_INSN (after) != bb_note)
312 reorder_insns_nobb (bb_note, bb_note, after);
313 }
314 else
315 {
316 /* Otherwise we must create a note and a basic block structure. */
317
318 bb = alloc_block ();
319
320 init_rtl_bb_info (bb);
321 if (!head && !end)
322 head = end = bb_note
323 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
324 else if (LABEL_P (head) && end)
325 {
326 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
327 if (head == end)
328 end = bb_note;
329 }
330 else
331 {
332 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
333 head = bb_note;
334 if (!end)
335 end = head;
336 }
337
338 NOTE_BASIC_BLOCK (bb_note) = bb;
339 }
340
341 /* Always include the bb note in the block. */
342 if (NEXT_INSN (end) == bb_note)
343 end = bb_note;
344
345 BB_HEAD (bb) = head;
346 BB_END (bb) = end;
347 bb->index = last_basic_block_for_fn (cfun)++;
348 bb->flags = BB_NEW | BB_RTL;
349 link_block (bb, after);
350 SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
351 df_bb_refs_record (bb->index, false);
352 update_bb_for_insn (bb);
353 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
354
355 /* Tag the block so that we know it has been used when considering
356 other basic block notes. */
357 bb->aux = bb;
358
359 return bb;
360 }
361
362 /* Create new basic block consisting of instructions in between HEAD and END
363 and place it to the BB chain after block AFTER. END can be NULL to
364 create a new empty basic block before HEAD. Both END and HEAD can be
365 NULL to create basic block at the end of INSN chain. */
366
367 static basic_block
368 rtl_create_basic_block (void *headp, void *endp, basic_block after)
369 {
370 rtx_insn *head = (rtx_insn *) headp;
371 rtx_insn *end = (rtx_insn *) endp;
372 basic_block bb;
373
374 /* Grow the basic block array if needed. */
375 if ((size_t) last_basic_block_for_fn (cfun)
376 >= basic_block_info_for_fn (cfun)->length ())
377 {
378 size_t new_size =
379 (last_basic_block_for_fn (cfun)
380 + (last_basic_block_for_fn (cfun) + 3) / 4);
381 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
382 }
383
384 n_basic_blocks_for_fn (cfun)++;
385
386 bb = create_basic_block_structure (head, end, NULL, after);
387 bb->aux = NULL;
388 return bb;
389 }
390
391 static basic_block
392 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
393 {
394 basic_block newbb = rtl_create_basic_block (head, end, after);
395
396 return newbb;
397 }
398 \f
399 /* Delete the insns in a (non-live) block. We physically delete every
400 non-deleted-note insn, and update the flow graph appropriately.
401
402 Return nonzero if we deleted an exception handler. */
403
404 /* ??? Preserving all such notes strikes me as wrong. It would be nice
405 to post-process the stream to remove empty blocks, loops, ranges, etc. */
406
407 static void
408 rtl_delete_block (basic_block b)
409 {
410 rtx_insn *insn, *end;
411
412 /* If the head of this block is a CODE_LABEL, then it might be the
413 label for an exception handler which can't be reached. We need
414 to remove the label from the exception_handler_label list. */
415 insn = BB_HEAD (b);
416
417 end = get_last_bb_insn (b);
418
419 /* Selectively delete the entire chain. */
420 BB_HEAD (b) = NULL;
421 delete_insn_chain (insn, end, true);
422
423
424 if (dump_file)
425 fprintf (dump_file, "deleting block %d\n", b->index);
426 df_bb_delete (b->index);
427 }
428 \f
429 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
430
431 void
432 compute_bb_for_insn (void)
433 {
434 basic_block bb;
435
436 FOR_EACH_BB_FN (bb, cfun)
437 {
438 rtx_insn *end = BB_END (bb);
439 rtx_insn *insn;
440
441 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
442 {
443 BLOCK_FOR_INSN (insn) = bb;
444 if (insn == end)
445 break;
446 }
447 }
448 }
449
450 /* Release the basic_block_for_insn array. */
451
452 unsigned int
453 free_bb_for_insn (void)
454 {
455 rtx_insn *insn;
456 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
457 if (!BARRIER_P (insn))
458 BLOCK_FOR_INSN (insn) = NULL;
459 return 0;
460 }
461
462 namespace {
463
464 const pass_data pass_data_free_cfg =
465 {
466 RTL_PASS, /* type */
467 "*free_cfg", /* name */
468 OPTGROUP_NONE, /* optinfo_flags */
469 TV_NONE, /* tv_id */
470 0, /* properties_required */
471 0, /* properties_provided */
472 PROP_cfg, /* properties_destroyed */
473 0, /* todo_flags_start */
474 0, /* todo_flags_finish */
475 };
476
477 class pass_free_cfg : public rtl_opt_pass
478 {
479 public:
480 pass_free_cfg (gcc::context *ctxt)
481 : rtl_opt_pass (pass_data_free_cfg, ctxt)
482 {}
483
484 /* opt_pass methods: */
485 virtual unsigned int execute (function *);
486
487 }; // class pass_free_cfg
488
489 unsigned int
490 pass_free_cfg::execute (function *)
491 {
492 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
493 valid at that point so it would be too late to call df_analyze. */
494 if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
495 {
496 df_note_add_problem ();
497 df_analyze ();
498 }
499
500 if (crtl->has_bb_partition)
501 insert_section_boundary_note ();
502
503 free_bb_for_insn ();
504 return 0;
505 }
506
507 } // anon namespace
508
509 rtl_opt_pass *
510 make_pass_free_cfg (gcc::context *ctxt)
511 {
512 return new pass_free_cfg (ctxt);
513 }
514
515 /* Return RTX to emit after when we want to emit code on the entry of function. */
516 rtx_insn *
517 entry_of_function (void)
518 {
519 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
520 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
521 }
522
523 /* Emit INSN at the entry point of the function, ensuring that it is only
524 executed once per function. */
525 void
526 emit_insn_at_entry (rtx insn)
527 {
528 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
529 edge e = ei_safe_edge (ei);
530 gcc_assert (e->flags & EDGE_FALLTHRU);
531
532 insert_insn_on_edge (insn, e);
533 commit_edge_insertions ();
534 }
535
536 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
537 (or BARRIER if found) and notify df of the bb change.
538 The insn chain range is inclusive
539 (i.e. both BEGIN and END will be updated. */
540
541 static void
542 update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
543 {
544 rtx_insn *insn;
545
546 end = NEXT_INSN (end);
547 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
548 if (!BARRIER_P (insn))
549 df_insn_change_bb (insn, bb);
550 }
551
552 /* Update BLOCK_FOR_INSN of insns in BB to BB,
553 and notify df of the change. */
554
555 void
556 update_bb_for_insn (basic_block bb)
557 {
558 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
559 }
560
561 \f
562 /* Like active_insn_p, except keep the return value use or clobber around
563 even after reload. */
564
565 static bool
566 flow_active_insn_p (const rtx_insn *insn)
567 {
568 if (active_insn_p (insn))
569 return true;
570
571 /* A clobber of the function return value exists for buggy
572 programs that fail to return a value. Its effect is to
573 keep the return value from being live across the entire
574 function. If we allow it to be skipped, we introduce the
575 possibility for register lifetime confusion.
576 Similarly, keep a USE of the function return value, otherwise
577 the USE is dropped and we could fail to thread jump if USE
578 appears on some paths and not on others, see PR90257. */
579 if ((GET_CODE (PATTERN (insn)) == CLOBBER
580 || GET_CODE (PATTERN (insn)) == USE)
581 && REG_P (XEXP (PATTERN (insn), 0))
582 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
583 return true;
584
585 return false;
586 }
587
588 /* Return true if the block has no effect and only forwards control flow to
589 its single destination. */
590
591 bool
592 contains_no_active_insn_p (const_basic_block bb)
593 {
594 rtx_insn *insn;
595
596 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
597 || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
598 || !single_succ_p (bb)
599 || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
600 return false;
601
602 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
603 if (INSN_P (insn) && flow_active_insn_p (insn))
604 return false;
605
606 return (!INSN_P (insn)
607 || (JUMP_P (insn) && simplejump_p (insn))
608 || !flow_active_insn_p (insn));
609 }
610
611 /* Likewise, but protect loop latches, headers and preheaders. */
612 /* FIXME: Make this a cfg hook. */
613
614 bool
615 forwarder_block_p (const_basic_block bb)
616 {
617 if (!contains_no_active_insn_p (bb))
618 return false;
619
620 /* Protect loop latches, headers and preheaders. */
621 if (current_loops)
622 {
623 basic_block dest;
624 if (bb->loop_father->header == bb)
625 return false;
626 dest = EDGE_SUCC (bb, 0)->dest;
627 if (dest->loop_father->header == dest)
628 return false;
629 }
630
631 return true;
632 }
633
634 /* Return nonzero if we can reach target from src by falling through. */
635 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
636
637 bool
638 can_fallthru (basic_block src, basic_block target)
639 {
640 rtx_insn *insn = BB_END (src);
641 rtx_insn *insn2;
642 edge e;
643 edge_iterator ei;
644
645 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
646 return true;
647 if (src->next_bb != target)
648 return false;
649
650 /* ??? Later we may add code to move jump tables offline. */
651 if (tablejump_p (insn, NULL, NULL))
652 return false;
653
654 FOR_EACH_EDGE (e, ei, src->succs)
655 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
656 && e->flags & EDGE_FALLTHRU)
657 return false;
658
659 insn2 = BB_HEAD (target);
660 if (!active_insn_p (insn2))
661 insn2 = next_active_insn (insn2);
662
663 return next_active_insn (insn) == insn2;
664 }
665
666 /* Return nonzero if we could reach target from src by falling through,
667 if the target was made adjacent. If we already have a fall-through
668 edge to the exit block, we can't do that. */
669 static bool
670 could_fall_through (basic_block src, basic_block target)
671 {
672 edge e;
673 edge_iterator ei;
674
675 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
676 return true;
677 FOR_EACH_EDGE (e, ei, src->succs)
678 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
679 && e->flags & EDGE_FALLTHRU)
680 return 0;
681 return true;
682 }
683 \f
684 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
685 rtx_note *
686 bb_note (basic_block bb)
687 {
688 rtx_insn *note;
689
690 note = BB_HEAD (bb);
691 if (LABEL_P (note))
692 note = NEXT_INSN (note);
693
694 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
695 return as_a <rtx_note *> (note);
696 }
697
698 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
699 note associated with the BLOCK. */
700
701 static rtx_insn *
702 first_insn_after_basic_block_note (basic_block block)
703 {
704 rtx_insn *insn;
705
706 /* Get the first instruction in the block. */
707 insn = BB_HEAD (block);
708
709 if (insn == NULL_RTX)
710 return NULL;
711 if (LABEL_P (insn))
712 insn = NEXT_INSN (insn);
713 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
714
715 return NEXT_INSN (insn);
716 }
717
718 /* Creates a new basic block just after basic block BB by splitting
719 everything after specified instruction INSNP. */
720
721 static basic_block
722 rtl_split_block (basic_block bb, void *insnp)
723 {
724 basic_block new_bb;
725 rtx_insn *insn = (rtx_insn *) insnp;
726 edge e;
727 edge_iterator ei;
728
729 if (!insn)
730 {
731 insn = first_insn_after_basic_block_note (bb);
732
733 if (insn)
734 {
735 rtx_insn *next = insn;
736
737 insn = PREV_INSN (insn);
738
739 /* If the block contains only debug insns, insn would have
740 been NULL in a non-debug compilation, and then we'd end
741 up emitting a DELETED note. For -fcompare-debug
742 stability, emit the note too. */
743 if (insn != BB_END (bb)
744 && DEBUG_INSN_P (next)
745 && DEBUG_INSN_P (BB_END (bb)))
746 {
747 while (next != BB_END (bb) && DEBUG_INSN_P (next))
748 next = NEXT_INSN (next);
749
750 if (next == BB_END (bb))
751 emit_note_after (NOTE_INSN_DELETED, next);
752 }
753 }
754 else
755 insn = get_last_insn ();
756 }
757
758 /* We probably should check type of the insn so that we do not create
759 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
760 bother. */
761 if (insn == BB_END (bb))
762 emit_note_after (NOTE_INSN_DELETED, insn);
763
764 /* Create the new basic block. */
765 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
766 BB_COPY_PARTITION (new_bb, bb);
767 BB_END (bb) = insn;
768
769 /* Redirect the outgoing edges. */
770 new_bb->succs = bb->succs;
771 bb->succs = NULL;
772 FOR_EACH_EDGE (e, ei, new_bb->succs)
773 e->src = new_bb;
774
775 /* The new block starts off being dirty. */
776 df_set_bb_dirty (bb);
777 return new_bb;
778 }
779
780 /* Return true if the single edge between blocks A and B is the only place
781 in RTL which holds some unique locus. */
782
783 static bool
784 unique_locus_on_edge_between_p (basic_block a, basic_block b)
785 {
786 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
787 rtx_insn *insn, *end;
788
789 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
790 return false;
791
792 /* First scan block A backward. */
793 insn = BB_END (a);
794 end = PREV_INSN (BB_HEAD (a));
795 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
796 insn = PREV_INSN (insn);
797
798 if (insn != end && INSN_LOCATION (insn) == goto_locus)
799 return false;
800
801 /* Then scan block B forward. */
802 insn = BB_HEAD (b);
803 if (insn)
804 {
805 end = NEXT_INSN (BB_END (b));
806 while (insn != end && !NONDEBUG_INSN_P (insn))
807 insn = NEXT_INSN (insn);
808
809 if (insn != end && INSN_HAS_LOCATION (insn)
810 && INSN_LOCATION (insn) == goto_locus)
811 return false;
812 }
813
814 return true;
815 }
816
817 /* If the single edge between blocks A and B is the only place in RTL which
818 holds some unique locus, emit a nop with that locus between the blocks. */
819
820 static void
821 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
822 {
823 if (!unique_locus_on_edge_between_p (a, b))
824 return;
825
826 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
827 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
828 }
829
830 /* Blocks A and B are to be merged into a single block A. The insns
831 are already contiguous. */
832
833 static void
834 rtl_merge_blocks (basic_block a, basic_block b)
835 {
836 /* If B is a forwarder block whose outgoing edge has no location, we'll
837 propagate the locus of the edge between A and B onto it. */
838 const bool forward_edge_locus
839 = (b->flags & BB_FORWARDER_BLOCK) != 0
840 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
841 rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
842 rtx_insn *del_first = NULL, *del_last = NULL;
843 rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
844 int b_empty = 0;
845
846 if (dump_file)
847 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
848 a->index);
849
850 while (DEBUG_INSN_P (b_end))
851 b_end = PREV_INSN (b_debug_start = b_end);
852
853 /* If there was a CODE_LABEL beginning B, delete it. */
854 if (LABEL_P (b_head))
855 {
856 /* Detect basic blocks with nothing but a label. This can happen
857 in particular at the end of a function. */
858 if (b_head == b_end)
859 b_empty = 1;
860
861 del_first = del_last = b_head;
862 b_head = NEXT_INSN (b_head);
863 }
864
865 /* Delete the basic block note and handle blocks containing just that
866 note. */
867 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
868 {
869 if (b_head == b_end)
870 b_empty = 1;
871 if (! del_last)
872 del_first = b_head;
873
874 del_last = b_head;
875 b_head = NEXT_INSN (b_head);
876 }
877
878 /* If there was a jump out of A, delete it. */
879 if (JUMP_P (a_end))
880 {
881 rtx_insn *prev;
882
883 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
884 if (!NOTE_P (prev)
885 || NOTE_INSN_BASIC_BLOCK_P (prev)
886 || prev == BB_HEAD (a))
887 break;
888
889 del_first = a_end;
890
891 /* If this was a conditional jump, we need to also delete
892 the insn that set cc0. */
893 if (HAVE_cc0 && only_sets_cc0_p (prev))
894 {
895 rtx_insn *tmp = prev;
896
897 prev = prev_nonnote_insn (prev);
898 if (!prev)
899 prev = BB_HEAD (a);
900 del_first = tmp;
901 }
902
903 a_end = PREV_INSN (del_first);
904 }
905 else if (BARRIER_P (NEXT_INSN (a_end)))
906 del_first = NEXT_INSN (a_end);
907
908 /* Delete everything marked above as well as crap that might be
909 hanging out between the two blocks. */
910 BB_END (a) = a_end;
911 BB_HEAD (b) = b_empty ? NULL : b_head;
912 delete_insn_chain (del_first, del_last, true);
913
914 /* If not optimizing, preserve the locus of the single edge between
915 blocks A and B if necessary by emitting a nop. */
916 if (!optimize
917 && !forward_edge_locus
918 && !DECL_IGNORED_P (current_function_decl))
919 {
920 emit_nop_for_unique_locus_between (a, b);
921 a_end = BB_END (a);
922 }
923
924 /* Reassociate the insns of B with A. */
925 if (!b_empty)
926 {
927 update_bb_for_insn_chain (a_end, b_debug_end, a);
928
929 BB_END (a) = b_debug_end;
930 BB_HEAD (b) = NULL;
931 }
932 else if (b_end != b_debug_end)
933 {
934 /* Move any deleted labels and other notes between the end of A
935 and the debug insns that make up B after the debug insns,
936 bringing the debug insns into A while keeping the notes after
937 the end of A. */
938 if (NEXT_INSN (a_end) != b_debug_start)
939 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
940 b_debug_end);
941 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
942 BB_END (a) = b_debug_end;
943 }
944
945 df_bb_delete (b->index);
946
947 if (forward_edge_locus)
948 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
949
950 if (dump_file)
951 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
952 }
953
954
955 /* Return true when block A and B can be merged. */
956
957 static bool
958 rtl_can_merge_blocks (basic_block a, basic_block b)
959 {
960 /* If we are partitioning hot/cold basic blocks, we don't want to
961 mess up unconditional or indirect jumps that cross between hot
962 and cold sections.
963
964 Basic block partitioning may result in some jumps that appear to
965 be optimizable (or blocks that appear to be mergeable), but which really
966 must be left untouched (they are required to make it safely across
967 partition boundaries). See the comments at the top of
968 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
969
970 if (BB_PARTITION (a) != BB_PARTITION (b))
971 return false;
972
973 /* Protect the loop latches. */
974 if (current_loops && b->loop_father->latch == b)
975 return false;
976
977 /* There must be exactly one edge in between the blocks. */
978 return (single_succ_p (a)
979 && single_succ (a) == b
980 && single_pred_p (b)
981 && a != b
982 /* Must be simple edge. */
983 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
984 && a->next_bb == b
985 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
986 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
987 /* If the jump insn has side effects,
988 we can't kill the edge. */
989 && (!JUMP_P (BB_END (a))
990 || (reload_completed
991 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
992 }
993 \f
994 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
995 exist. */
996
997 rtx_code_label *
998 block_label (basic_block block)
999 {
1000 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1001 return NULL;
1002
1003 if (!LABEL_P (BB_HEAD (block)))
1004 {
1005 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
1006 }
1007
1008 return as_a <rtx_code_label *> (BB_HEAD (block));
1009 }
1010
1011 /* Remove all barriers from BB_FOOTER of a BB. */
1012
1013 static void
1014 remove_barriers_from_footer (basic_block bb)
1015 {
1016 rtx_insn *insn = BB_FOOTER (bb);
1017
1018 /* Remove barriers but keep jumptables. */
1019 while (insn)
1020 {
1021 if (BARRIER_P (insn))
1022 {
1023 if (PREV_INSN (insn))
1024 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1025 else
1026 BB_FOOTER (bb) = NEXT_INSN (insn);
1027 if (NEXT_INSN (insn))
1028 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1029 }
1030 if (LABEL_P (insn))
1031 return;
1032 insn = NEXT_INSN (insn);
1033 }
1034 }
1035
1036 /* Attempt to perform edge redirection by replacing possibly complex jump
1037 instruction by unconditional jump or removing jump completely. This can
1038 apply only if all edges now point to the same block. The parameters and
1039 return values are equivalent to redirect_edge_and_branch. */
1040
1041 edge
1042 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
1043 {
1044 basic_block src = e->src;
1045 rtx_insn *insn = BB_END (src), *kill_from;
1046 rtx set;
1047 int fallthru = 0;
1048
1049 /* If we are partitioning hot/cold basic blocks, we don't want to
1050 mess up unconditional or indirect jumps that cross between hot
1051 and cold sections.
1052
1053 Basic block partitioning may result in some jumps that appear to
1054 be optimizable (or blocks that appear to be mergeable), but which really
1055 must be left untouched (they are required to make it safely across
1056 partition boundaries). See the comments at the top of
1057 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1058
1059 if (BB_PARTITION (src) != BB_PARTITION (target))
1060 return NULL;
1061
1062 /* We can replace or remove a complex jump only when we have exactly
1063 two edges. Also, if we have exactly one outgoing edge, we can
1064 redirect that. */
1065 if (EDGE_COUNT (src->succs) >= 3
1066 /* Verify that all targets will be TARGET. Specifically, the
1067 edge that is not E must also go to TARGET. */
1068 || (EDGE_COUNT (src->succs) == 2
1069 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1070 return NULL;
1071
1072 if (!onlyjump_p (insn))
1073 return NULL;
1074 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1075 return NULL;
1076
1077 /* Avoid removing branch with side effects. */
1078 set = single_set (insn);
1079 if (!set || side_effects_p (set))
1080 return NULL;
1081
1082 /* In case we zap a conditional jump, we'll need to kill
1083 the cc0 setter too. */
1084 kill_from = insn;
1085 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
1086 && only_sets_cc0_p (PREV_INSN (insn)))
1087 kill_from = PREV_INSN (insn);
1088
1089 /* See if we can create the fallthru edge. */
1090 if (in_cfglayout || can_fallthru (src, target))
1091 {
1092 if (dump_file)
1093 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1094 fallthru = 1;
1095
1096 /* Selectively unlink whole insn chain. */
1097 if (in_cfglayout)
1098 {
1099 delete_insn_chain (kill_from, BB_END (src), false);
1100 remove_barriers_from_footer (src);
1101 }
1102 else
1103 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1104 false);
1105 }
1106
1107 /* If this already is simplejump, redirect it. */
1108 else if (simplejump_p (insn))
1109 {
1110 if (e->dest == target)
1111 return NULL;
1112 if (dump_file)
1113 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1114 INSN_UID (insn), e->dest->index, target->index);
1115 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1116 block_label (target), 0))
1117 {
1118 gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
1119 return NULL;
1120 }
1121 }
1122
1123 /* Cannot do anything for target exit block. */
1124 else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1125 return NULL;
1126
1127 /* Or replace possibly complicated jump insn by simple jump insn. */
1128 else
1129 {
1130 rtx_code_label *target_label = block_label (target);
1131 rtx_insn *barrier;
1132 rtx_insn *label;
1133 rtx_jump_table_data *table;
1134
1135 emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
1136 JUMP_LABEL (BB_END (src)) = target_label;
1137 LABEL_NUSES (target_label)++;
1138 if (dump_file)
1139 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1140 INSN_UID (insn), INSN_UID (BB_END (src)));
1141
1142
1143 delete_insn_chain (kill_from, insn, false);
1144
1145 /* Recognize a tablejump that we are converting to a
1146 simple jump and remove its associated CODE_LABEL
1147 and ADDR_VEC or ADDR_DIFF_VEC. */
1148 if (tablejump_p (insn, &label, &table))
1149 delete_insn_chain (label, table, false);
1150
1151 barrier = next_nonnote_nondebug_insn (BB_END (src));
1152 if (!barrier || !BARRIER_P (barrier))
1153 emit_barrier_after (BB_END (src));
1154 else
1155 {
1156 if (barrier != NEXT_INSN (BB_END (src)))
1157 {
1158 /* Move the jump before barrier so that the notes
1159 which originally were or were created before jump table are
1160 inside the basic block. */
1161 rtx_insn *new_insn = BB_END (src);
1162
1163 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1164 PREV_INSN (barrier), src);
1165
1166 SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1167 SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1168
1169 SET_NEXT_INSN (new_insn) = barrier;
1170 SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1171
1172 SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1173 SET_PREV_INSN (barrier) = new_insn;
1174 }
1175 }
1176 }
1177
1178 /* Keep only one edge out and set proper flags. */
1179 if (!single_succ_p (src))
1180 remove_edge (e);
1181 gcc_assert (single_succ_p (src));
1182
1183 e = single_succ_edge (src);
1184 if (fallthru)
1185 e->flags = EDGE_FALLTHRU;
1186 else
1187 e->flags = 0;
1188
1189 e->probability = profile_probability::always ();
1190
1191 if (e->dest != target)
1192 redirect_edge_succ (e, target);
1193 return e;
1194 }
1195
1196 /* Subroutine of redirect_branch_edge that tries to patch the jump
1197 instruction INSN so that it reaches block NEW. Do this
1198 only when it originally reached block OLD. Return true if this
1199 worked or the original target wasn't OLD, return false if redirection
1200 doesn't work. */
1201
1202 static bool
1203 patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
1204 {
1205 rtx_jump_table_data *table;
1206 rtx tmp;
1207 /* Recognize a tablejump and adjust all matching cases. */
1208 if (tablejump_p (insn, NULL, &table))
1209 {
1210 rtvec vec;
1211 int j;
1212 rtx_code_label *new_label = block_label (new_bb);
1213
1214 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1215 return false;
1216 vec = table->get_labels ();
1217
1218 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1219 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1220 {
1221 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1222 --LABEL_NUSES (old_label);
1223 ++LABEL_NUSES (new_label);
1224 }
1225
1226 /* Handle casesi dispatch insns. */
1227 if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
1228 && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
1229 {
1230 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1231 new_label);
1232 --LABEL_NUSES (old_label);
1233 ++LABEL_NUSES (new_label);
1234 }
1235 }
1236 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1237 {
1238 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1239 rtx note;
1240
1241 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1242 return false;
1243 rtx_code_label *new_label = block_label (new_bb);
1244
1245 for (i = 0; i < n; ++i)
1246 {
1247 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1248 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1249 if (XEXP (old_ref, 0) == old_label)
1250 {
1251 ASM_OPERANDS_LABEL (tmp, i)
1252 = gen_rtx_LABEL_REF (Pmode, new_label);
1253 --LABEL_NUSES (old_label);
1254 ++LABEL_NUSES (new_label);
1255 }
1256 }
1257
1258 if (JUMP_LABEL (insn) == old_label)
1259 {
1260 JUMP_LABEL (insn) = new_label;
1261 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1262 if (note)
1263 remove_note (insn, note);
1264 }
1265 else
1266 {
1267 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1268 if (note)
1269 remove_note (insn, note);
1270 if (JUMP_LABEL (insn) != new_label
1271 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1272 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1273 }
1274 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1275 != NULL_RTX)
1276 XEXP (note, 0) = new_label;
1277 }
1278 else
1279 {
1280 /* ?? We may play the games with moving the named labels from
1281 one basic block to the other in case only one computed_jump is
1282 available. */
1283 if (computed_jump_p (insn)
1284 /* A return instruction can't be redirected. */
1285 || returnjump_p (insn))
1286 return false;
1287
1288 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1289 {
1290 /* If the insn doesn't go where we think, we're confused. */
1291 gcc_assert (JUMP_LABEL (insn) == old_label);
1292
1293 /* If the substitution doesn't succeed, die. This can happen
1294 if the back end emitted unrecognizable instructions or if
1295 target is exit block on some arches. Or for crossing
1296 jumps. */
1297 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1298 block_label (new_bb), 0))
1299 {
1300 gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1301 || CROSSING_JUMP_P (insn));
1302 return false;
1303 }
1304 }
1305 }
1306 return true;
1307 }
1308
1309
1310 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1311 NULL on failure */
1312 static edge
1313 redirect_branch_edge (edge e, basic_block target)
1314 {
1315 rtx_insn *old_label = BB_HEAD (e->dest);
1316 basic_block src = e->src;
1317 rtx_insn *insn = BB_END (src);
1318
1319 /* We can only redirect non-fallthru edges of jump insn. */
1320 if (e->flags & EDGE_FALLTHRU)
1321 return NULL;
1322 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1323 return NULL;
1324
1325 if (!currently_expanding_to_rtl)
1326 {
1327 if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
1328 return NULL;
1329 }
1330 else
1331 /* When expanding this BB might actually contain multiple
1332 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1333 Redirect all of those that match our label. */
1334 FOR_BB_INSNS (src, insn)
1335 if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
1336 old_label, target))
1337 return NULL;
1338
1339 if (dump_file)
1340 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1341 e->src->index, e->dest->index, target->index);
1342
1343 if (e->dest != target)
1344 e = redirect_edge_succ_nodup (e, target);
1345
1346 return e;
1347 }
1348
1349 /* Called when edge E has been redirected to a new destination,
1350 in order to update the region crossing flag on the edge and
1351 jump. */
1352
1353 static void
1354 fixup_partition_crossing (edge e)
1355 {
1356 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1357 == EXIT_BLOCK_PTR_FOR_FN (cfun))
1358 return;
1359 /* If we redirected an existing edge, it may already be marked
1360 crossing, even though the new src is missing a reg crossing note.
1361 But make sure reg crossing note doesn't already exist before
1362 inserting. */
1363 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1364 {
1365 e->flags |= EDGE_CROSSING;
1366 if (JUMP_P (BB_END (e->src)))
1367 CROSSING_JUMP_P (BB_END (e->src)) = 1;
1368 }
1369 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1370 {
1371 e->flags &= ~EDGE_CROSSING;
1372 /* Remove the section crossing note from jump at end of
1373 src if it exists, and if no other successors are
1374 still crossing. */
1375 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
1376 {
1377 bool has_crossing_succ = false;
1378 edge e2;
1379 edge_iterator ei;
1380 FOR_EACH_EDGE (e2, ei, e->src->succs)
1381 {
1382 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1383 if (has_crossing_succ)
1384 break;
1385 }
1386 if (!has_crossing_succ)
1387 CROSSING_JUMP_P (BB_END (e->src)) = 0;
1388 }
1389 }
1390 }
1391
1392 /* Called when block BB has been reassigned to the cold partition,
1393 because it is now dominated by another cold block,
1394 to ensure that the region crossing attributes are updated. */
1395
1396 static void
1397 fixup_new_cold_bb (basic_block bb)
1398 {
1399 edge e;
1400 edge_iterator ei;
1401
1402 /* This is called when a hot bb is found to now be dominated
1403 by a cold bb and therefore needs to become cold. Therefore,
1404 its preds will no longer be region crossing. Any non-dominating
1405 preds that were previously hot would also have become cold
1406 in the caller for the same region. Any preds that were previously
1407 region-crossing will be adjusted in fixup_partition_crossing. */
1408 FOR_EACH_EDGE (e, ei, bb->preds)
1409 {
1410 fixup_partition_crossing (e);
1411 }
1412
1413 /* Possibly need to make bb's successor edges region crossing,
1414 or remove stale region crossing. */
1415 FOR_EACH_EDGE (e, ei, bb->succs)
1416 {
1417 /* We can't have fall-through edges across partition boundaries.
1418 Note that force_nonfallthru will do any necessary partition
1419 boundary fixup by calling fixup_partition_crossing itself. */
1420 if ((e->flags & EDGE_FALLTHRU)
1421 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1422 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1423 force_nonfallthru (e);
1424 else
1425 fixup_partition_crossing (e);
1426 }
1427 }
1428
1429 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1430 expense of adding new instructions or reordering basic blocks.
1431
1432 Function can be also called with edge destination equivalent to the TARGET.
1433 Then it should try the simplifications and do nothing if none is possible.
1434
1435 Return edge representing the branch if transformation succeeded. Return NULL
1436 on failure.
1437 We still return NULL in case E already destinated TARGET and we didn't
1438 managed to simplify instruction stream. */
1439
1440 static edge
1441 rtl_redirect_edge_and_branch (edge e, basic_block target)
1442 {
1443 edge ret;
1444 basic_block src = e->src;
1445 basic_block dest = e->dest;
1446
1447 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1448 return NULL;
1449
1450 if (dest == target)
1451 return e;
1452
1453 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1454 {
1455 df_set_bb_dirty (src);
1456 fixup_partition_crossing (ret);
1457 return ret;
1458 }
1459
1460 ret = redirect_branch_edge (e, target);
1461 if (!ret)
1462 return NULL;
1463
1464 df_set_bb_dirty (src);
1465 fixup_partition_crossing (ret);
1466 return ret;
1467 }
1468
1469 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1470
1471 void
1472 emit_barrier_after_bb (basic_block bb)
1473 {
1474 rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
1475 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1476 || current_ir_type () == IR_RTL_CFGLAYOUT);
1477 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1478 {
1479 rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1480
1481 if (BB_FOOTER (bb))
1482 {
1483 rtx_insn *footer_tail = BB_FOOTER (bb);
1484
1485 while (NEXT_INSN (footer_tail))
1486 footer_tail = NEXT_INSN (footer_tail);
1487 if (!BARRIER_P (footer_tail))
1488 {
1489 SET_NEXT_INSN (footer_tail) = insn;
1490 SET_PREV_INSN (insn) = footer_tail;
1491 }
1492 }
1493 else
1494 BB_FOOTER (bb) = insn;
1495 }
1496 }
1497
1498 /* Like force_nonfallthru below, but additionally performs redirection
1499 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1500 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1501 simple_return_rtx, indicating which kind of returnjump to create.
1502 It should be NULL otherwise. */
1503
1504 basic_block
1505 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1506 {
1507 basic_block jump_block, new_bb = NULL, src = e->src;
1508 rtx note;
1509 edge new_edge;
1510 int abnormal_edge_flags = 0;
1511 bool asm_goto_edge = false;
1512 int loc;
1513
1514 /* In the case the last instruction is conditional jump to the next
1515 instruction, first redirect the jump itself and then continue
1516 by creating a basic block afterwards to redirect fallthru edge. */
1517 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1518 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1519 && any_condjump_p (BB_END (e->src))
1520 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1521 {
1522 rtx note;
1523 edge b = unchecked_make_edge (e->src, target, 0);
1524 bool redirected;
1525
1526 redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1527 block_label (target), 0);
1528 gcc_assert (redirected);
1529
1530 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1531 if (note)
1532 {
1533 int prob = XINT (note, 0);
1534
1535 b->probability = profile_probability::from_reg_br_prob_note (prob);
1536 e->probability -= e->probability;
1537 }
1538 }
1539
1540 if (e->flags & EDGE_ABNORMAL)
1541 {
1542 /* Irritating special case - fallthru edge to the same block as abnormal
1543 edge.
1544 We can't redirect abnormal edge, but we still can split the fallthru
1545 one and create separate abnormal edge to original destination.
1546 This allows bb-reorder to make such edge non-fallthru. */
1547 gcc_assert (e->dest == target);
1548 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1549 e->flags &= EDGE_FALLTHRU;
1550 }
1551 else
1552 {
1553 gcc_assert (e->flags & EDGE_FALLTHRU);
1554 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1555 {
1556 /* We can't redirect the entry block. Create an empty block
1557 at the start of the function which we use to add the new
1558 jump. */
1559 edge tmp;
1560 edge_iterator ei;
1561 bool found = false;
1562
1563 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1564 ENTRY_BLOCK_PTR_FOR_FN (cfun));
1565 bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
1566
1567 /* Make sure new block ends up in correct hot/cold section. */
1568 BB_COPY_PARTITION (bb, e->dest);
1569
1570 /* Change the existing edge's source to be the new block, and add
1571 a new edge from the entry block to the new block. */
1572 e->src = bb;
1573 for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1574 (tmp = ei_safe_edge (ei)); )
1575 {
1576 if (tmp == e)
1577 {
1578 ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1579 found = true;
1580 break;
1581 }
1582 else
1583 ei_next (&ei);
1584 }
1585
1586 gcc_assert (found);
1587
1588 vec_safe_push (bb->succs, e);
1589 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1590 EDGE_FALLTHRU);
1591 }
1592 }
1593
1594 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1595 don't point to the target or fallthru label. */
1596 if (JUMP_P (BB_END (e->src))
1597 && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1598 && (e->flags & EDGE_FALLTHRU)
1599 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1600 {
1601 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1602 bool adjust_jump_target = false;
1603
1604 for (i = 0; i < n; ++i)
1605 {
1606 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1607 {
1608 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1609 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1610 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1611 adjust_jump_target = true;
1612 }
1613 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1614 asm_goto_edge = true;
1615 }
1616 if (adjust_jump_target)
1617 {
1618 rtx_insn *insn = BB_END (e->src);
1619 rtx note;
1620 rtx_insn *old_label = BB_HEAD (e->dest);
1621 rtx_insn *new_label = BB_HEAD (target);
1622
1623 if (JUMP_LABEL (insn) == old_label)
1624 {
1625 JUMP_LABEL (insn) = new_label;
1626 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1627 if (note)
1628 remove_note (insn, note);
1629 }
1630 else
1631 {
1632 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1633 if (note)
1634 remove_note (insn, note);
1635 if (JUMP_LABEL (insn) != new_label
1636 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1637 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1638 }
1639 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1640 != NULL_RTX)
1641 XEXP (note, 0) = new_label;
1642 }
1643 }
1644
1645 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1646 {
1647 rtx_insn *new_head;
1648 profile_count count = e->count ();
1649 profile_probability probability = e->probability;
1650 /* Create the new structures. */
1651
1652 /* If the old block ended with a tablejump, skip its table
1653 by searching forward from there. Otherwise start searching
1654 forward from the last instruction of the old block. */
1655 rtx_jump_table_data *table;
1656 if (tablejump_p (BB_END (e->src), NULL, &table))
1657 new_head = table;
1658 else
1659 new_head = BB_END (e->src);
1660 new_head = NEXT_INSN (new_head);
1661
1662 jump_block = create_basic_block (new_head, NULL, e->src);
1663 jump_block->count = count;
1664
1665 /* Make sure new block ends up in correct hot/cold section. */
1666
1667 BB_COPY_PARTITION (jump_block, e->src);
1668
1669 /* Wire edge in. */
1670 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1671 new_edge->probability = probability;
1672
1673 /* Redirect old edge. */
1674 redirect_edge_pred (e, jump_block);
1675 e->probability = profile_probability::always ();
1676
1677 /* If e->src was previously region crossing, it no longer is
1678 and the reg crossing note should be removed. */
1679 fixup_partition_crossing (new_edge);
1680
1681 /* If asm goto has any label refs to target's label,
1682 add also edge from asm goto bb to target. */
1683 if (asm_goto_edge)
1684 {
1685 new_edge->probability = new_edge->probability.apply_scale (1, 2);
1686 jump_block->count = jump_block->count.apply_scale (1, 2);
1687 edge new_edge2 = make_edge (new_edge->src, target,
1688 e->flags & ~EDGE_FALLTHRU);
1689 new_edge2->probability = probability - new_edge->probability;
1690 }
1691
1692 new_bb = jump_block;
1693 }
1694 else
1695 jump_block = e->src;
1696
1697 loc = e->goto_locus;
1698 e->flags &= ~EDGE_FALLTHRU;
1699 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1700 {
1701 if (jump_label == ret_rtx)
1702 emit_jump_insn_after_setloc (targetm.gen_return (),
1703 BB_END (jump_block), loc);
1704 else
1705 {
1706 gcc_assert (jump_label == simple_return_rtx);
1707 emit_jump_insn_after_setloc (targetm.gen_simple_return (),
1708 BB_END (jump_block), loc);
1709 }
1710 set_return_jump_label (BB_END (jump_block));
1711 }
1712 else
1713 {
1714 rtx_code_label *label = block_label (target);
1715 emit_jump_insn_after_setloc (targetm.gen_jump (label),
1716 BB_END (jump_block), loc);
1717 JUMP_LABEL (BB_END (jump_block)) = label;
1718 LABEL_NUSES (label)++;
1719 }
1720
1721 /* We might be in cfg layout mode, and if so, the following routine will
1722 insert the barrier correctly. */
1723 emit_barrier_after_bb (jump_block);
1724 redirect_edge_succ_nodup (e, target);
1725
1726 if (abnormal_edge_flags)
1727 make_edge (src, target, abnormal_edge_flags);
1728
1729 df_mark_solutions_dirty ();
1730 fixup_partition_crossing (e);
1731 return new_bb;
1732 }
1733
1734 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1735 (and possibly create new basic block) to make edge non-fallthru.
1736 Return newly created BB or NULL if none. */
1737
1738 static basic_block
1739 rtl_force_nonfallthru (edge e)
1740 {
1741 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1742 }
1743
1744 /* Redirect edge even at the expense of creating new jump insn or
1745 basic block. Return new basic block if created, NULL otherwise.
1746 Conversion must be possible. */
1747
1748 static basic_block
1749 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1750 {
1751 if (redirect_edge_and_branch (e, target)
1752 || e->dest == target)
1753 return NULL;
1754
1755 /* In case the edge redirection failed, try to force it to be non-fallthru
1756 and redirect newly created simplejump. */
1757 df_set_bb_dirty (e->src);
1758 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1759 }
1760
1761 /* The given edge should potentially be a fallthru edge. If that is in
1762 fact true, delete the jump and barriers that are in the way. */
1763
1764 static void
1765 rtl_tidy_fallthru_edge (edge e)
1766 {
1767 rtx_insn *q;
1768 basic_block b = e->src, c = b->next_bb;
1769
1770 /* ??? In a late-running flow pass, other folks may have deleted basic
1771 blocks by nopping out blocks, leaving multiple BARRIERs between here
1772 and the target label. They ought to be chastised and fixed.
1773
1774 We can also wind up with a sequence of undeletable labels between
1775 one block and the next.
1776
1777 So search through a sequence of barriers, labels, and notes for
1778 the head of block C and assert that we really do fall through. */
1779
1780 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1781 if (NONDEBUG_INSN_P (q))
1782 return;
1783
1784 /* Remove what will soon cease being the jump insn from the source block.
1785 If block B consisted only of this single jump, turn it into a deleted
1786 note. */
1787 q = BB_END (b);
1788 if (JUMP_P (q)
1789 && onlyjump_p (q)
1790 && (any_uncondjump_p (q)
1791 || single_succ_p (b)))
1792 {
1793 rtx_insn *label;
1794 rtx_jump_table_data *table;
1795
1796 if (tablejump_p (q, &label, &table))
1797 {
1798 /* The label is likely mentioned in some instruction before
1799 the tablejump and might not be DCEd, so turn it into
1800 a note instead and move before the tablejump that is going to
1801 be deleted. */
1802 const char *name = LABEL_NAME (label);
1803 PUT_CODE (label, NOTE);
1804 NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1805 NOTE_DELETED_LABEL_NAME (label) = name;
1806 reorder_insns (label, label, PREV_INSN (q));
1807 delete_insn (table);
1808 }
1809
1810 /* If this was a conditional jump, we need to also delete
1811 the insn that set cc0. */
1812 if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1813 q = PREV_INSN (q);
1814
1815 q = PREV_INSN (q);
1816 }
1817 /* Unconditional jumps with side-effects (i.e. which we can't just delete
1818 together with the barrier) should never have a fallthru edge. */
1819 else if (JUMP_P (q) && any_uncondjump_p (q))
1820 return;
1821
1822 /* Selectively unlink the sequence. */
1823 if (q != PREV_INSN (BB_HEAD (c)))
1824 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1825
1826 e->flags |= EDGE_FALLTHRU;
1827 }
1828 \f
1829 /* Should move basic block BB after basic block AFTER. NIY. */
1830
1831 static bool
1832 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1833 basic_block after ATTRIBUTE_UNUSED)
1834 {
1835 return false;
1836 }
1837
1838 /* Locate the last bb in the same partition as START_BB. */
1839
1840 static basic_block
1841 last_bb_in_partition (basic_block start_bb)
1842 {
1843 basic_block bb;
1844 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1845 {
1846 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1847 return bb;
1848 }
1849 /* Return bb before the exit block. */
1850 return bb->prev_bb;
1851 }
1852
1853 /* Split a (typically critical) edge. Return the new block.
1854 The edge must not be abnormal.
1855
1856 ??? The code generally expects to be called on critical edges.
1857 The case of a block ending in an unconditional jump to a
1858 block with multiple predecessors is not handled optimally. */
1859
1860 static basic_block
1861 rtl_split_edge (edge edge_in)
1862 {
1863 basic_block bb, new_bb;
1864 rtx_insn *before;
1865
1866 /* Abnormal edges cannot be split. */
1867 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1868
1869 /* We are going to place the new block in front of edge destination.
1870 Avoid existence of fallthru predecessors. */
1871 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1872 {
1873 edge e = find_fallthru_edge (edge_in->dest->preds);
1874
1875 if (e)
1876 force_nonfallthru (e);
1877 }
1878
1879 /* Create the basic block note. */
1880 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1881 before = BB_HEAD (edge_in->dest);
1882 else
1883 before = NULL;
1884
1885 /* If this is a fall through edge to the exit block, the blocks might be
1886 not adjacent, and the right place is after the source. */
1887 if ((edge_in->flags & EDGE_FALLTHRU)
1888 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1889 {
1890 before = NEXT_INSN (BB_END (edge_in->src));
1891 bb = create_basic_block (before, NULL, edge_in->src);
1892 BB_COPY_PARTITION (bb, edge_in->src);
1893 }
1894 else
1895 {
1896 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1897 {
1898 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1899 BB_COPY_PARTITION (bb, edge_in->dest);
1900 }
1901 else
1902 {
1903 basic_block after = edge_in->dest->prev_bb;
1904 /* If this is post-bb reordering, and the edge crosses a partition
1905 boundary, the new block needs to be inserted in the bb chain
1906 at the end of the src partition (since we put the new bb into
1907 that partition, see below). Otherwise we may end up creating
1908 an extra partition crossing in the chain, which is illegal.
1909 It can't go after the src, because src may have a fall-through
1910 to a different block. */
1911 if (crtl->bb_reorder_complete
1912 && (edge_in->flags & EDGE_CROSSING))
1913 {
1914 after = last_bb_in_partition (edge_in->src);
1915 before = get_last_bb_insn (after);
1916 /* The instruction following the last bb in partition should
1917 be a barrier, since it cannot end in a fall-through. */
1918 gcc_checking_assert (BARRIER_P (before));
1919 before = NEXT_INSN (before);
1920 }
1921 bb = create_basic_block (before, NULL, after);
1922 /* Put the split bb into the src partition, to avoid creating
1923 a situation where a cold bb dominates a hot bb, in the case
1924 where src is cold and dest is hot. The src will dominate
1925 the new bb (whereas it might not have dominated dest). */
1926 BB_COPY_PARTITION (bb, edge_in->src);
1927 }
1928 }
1929
1930 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1931
1932 /* Can't allow a region crossing edge to be fallthrough. */
1933 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1934 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1935 {
1936 new_bb = force_nonfallthru (single_succ_edge (bb));
1937 gcc_assert (!new_bb);
1938 }
1939
1940 /* For non-fallthru edges, we must adjust the predecessor's
1941 jump instruction to target our new block. */
1942 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1943 {
1944 edge redirected = redirect_edge_and_branch (edge_in, bb);
1945 gcc_assert (redirected);
1946 }
1947 else
1948 {
1949 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1950 {
1951 /* For asm goto even splitting of fallthru edge might
1952 need insn patching, as other labels might point to the
1953 old label. */
1954 rtx_insn *last = BB_END (edge_in->src);
1955 if (last
1956 && JUMP_P (last)
1957 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1958 && (extract_asm_operands (PATTERN (last))
1959 || JUMP_LABEL (last) == before)
1960 && patch_jump_insn (last, before, bb))
1961 df_set_bb_dirty (edge_in->src);
1962 }
1963 redirect_edge_succ (edge_in, bb);
1964 }
1965
1966 return bb;
1967 }
1968
1969 /* Queue instructions for insertion on an edge between two basic blocks.
1970 The new instructions and basic blocks (if any) will not appear in the
1971 CFG until commit_edge_insertions is called. */
1972
1973 void
1974 insert_insn_on_edge (rtx pattern, edge e)
1975 {
1976 /* We cannot insert instructions on an abnormal critical edge.
1977 It will be easier to find the culprit if we die now. */
1978 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1979
1980 if (e->insns.r == NULL_RTX)
1981 start_sequence ();
1982 else
1983 push_to_sequence (e->insns.r);
1984
1985 emit_insn (pattern);
1986
1987 e->insns.r = get_insns ();
1988 end_sequence ();
1989 }
1990
1991 /* Update the CFG for the instructions queued on edge E. */
1992
1993 void
1994 commit_one_edge_insertion (edge e)
1995 {
1996 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1997 basic_block bb;
1998
1999 /* Pull the insns off the edge now since the edge might go away. */
2000 insns = e->insns.r;
2001 e->insns.r = NULL;
2002
2003 /* Figure out where to put these insns. If the destination has
2004 one predecessor, insert there. Except for the exit block. */
2005 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2006 {
2007 bb = e->dest;
2008
2009 /* Get the location correct wrt a code label, and "nice" wrt
2010 a basic block note, and before everything else. */
2011 tmp = BB_HEAD (bb);
2012 if (LABEL_P (tmp))
2013 tmp = NEXT_INSN (tmp);
2014 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2015 tmp = NEXT_INSN (tmp);
2016 if (tmp == BB_HEAD (bb))
2017 before = tmp;
2018 else if (tmp)
2019 after = PREV_INSN (tmp);
2020 else
2021 after = get_last_insn ();
2022 }
2023
2024 /* If the source has one successor and the edge is not abnormal,
2025 insert there. Except for the entry block.
2026 Don't do this if the predecessor ends in a jump other than
2027 unconditional simple jump. E.g. for asm goto that points all
2028 its labels at the fallthru basic block, we can't insert instructions
2029 before the asm goto, as the asm goto can have various of side effects,
2030 and can't emit instructions after the asm goto, as it must end
2031 the basic block. */
2032 else if ((e->flags & EDGE_ABNORMAL) == 0
2033 && single_succ_p (e->src)
2034 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2035 && (!JUMP_P (BB_END (e->src))
2036 || simplejump_p (BB_END (e->src))))
2037 {
2038 bb = e->src;
2039
2040 /* It is possible to have a non-simple jump here. Consider a target
2041 where some forms of unconditional jumps clobber a register. This
2042 happens on the fr30 for example.
2043
2044 We know this block has a single successor, so we can just emit
2045 the queued insns before the jump. */
2046 if (JUMP_P (BB_END (bb)))
2047 before = BB_END (bb);
2048 else
2049 {
2050 /* We'd better be fallthru, or we've lost track of what's what. */
2051 gcc_assert (e->flags & EDGE_FALLTHRU);
2052
2053 after = BB_END (bb);
2054 }
2055 }
2056
2057 /* Otherwise we must split the edge. */
2058 else
2059 {
2060 bb = split_edge (e);
2061
2062 /* If E crossed a partition boundary, we needed to make bb end in
2063 a region-crossing jump, even though it was originally fallthru. */
2064 if (JUMP_P (BB_END (bb)))
2065 before = BB_END (bb);
2066 else
2067 after = BB_END (bb);
2068 }
2069
2070 /* Now that we've found the spot, do the insertion. */
2071 if (before)
2072 {
2073 emit_insn_before_noloc (insns, before, bb);
2074 last = prev_nonnote_insn (before);
2075 }
2076 else
2077 last = emit_insn_after_noloc (insns, after, bb);
2078
2079 if (returnjump_p (last))
2080 {
2081 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2082 This is not currently a problem because this only happens
2083 for the (single) epilogue, which already has a fallthru edge
2084 to EXIT. */
2085
2086 e = single_succ_edge (bb);
2087 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2088 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2089
2090 e->flags &= ~EDGE_FALLTHRU;
2091 emit_barrier_after (last);
2092
2093 if (before)
2094 delete_insn (before);
2095 }
2096 else
2097 gcc_assert (!JUMP_P (last));
2098 }
2099
2100 /* Update the CFG for all queued instructions. */
2101
2102 void
2103 commit_edge_insertions (void)
2104 {
2105 basic_block bb;
2106
2107 /* Optimization passes that invoke this routine can cause hot blocks
2108 previously reached by both hot and cold blocks to become dominated only
2109 by cold blocks. This will cause the verification below to fail,
2110 and lead to now cold code in the hot section. In some cases this
2111 may only be visible after newly unreachable blocks are deleted,
2112 which will be done by fixup_partitions. */
2113 fixup_partitions ();
2114
2115 if (!currently_expanding_to_rtl)
2116 checking_verify_flow_info ();
2117
2118 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2119 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2120 {
2121 edge e;
2122 edge_iterator ei;
2123
2124 FOR_EACH_EDGE (e, ei, bb->succs)
2125 if (e->insns.r)
2126 {
2127 if (currently_expanding_to_rtl)
2128 rebuild_jump_labels_chain (e->insns.r);
2129 commit_one_edge_insertion (e);
2130 }
2131 }
2132 }
2133 \f
2134
2135 /* Print out RTL-specific basic block information (live information
2136 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2137 documented in dumpfile.h. */
2138
2139 static void
2140 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2141 {
2142 char *s_indent;
2143
2144 s_indent = (char *) alloca ((size_t) indent + 1);
2145 memset (s_indent, ' ', (size_t) indent);
2146 s_indent[indent] = '\0';
2147
2148 if (df && (flags & TDF_DETAILS))
2149 {
2150 df_dump_top (bb, outf);
2151 putc ('\n', outf);
2152 }
2153
2154 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2155 {
2156 rtx_insn *last = BB_END (bb);
2157 if (last)
2158 last = NEXT_INSN (last);
2159 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2160 {
2161 if (flags & TDF_DETAILS)
2162 df_dump_insn_top (insn, outf);
2163 if (! (flags & TDF_SLIM))
2164 print_rtl_single (outf, insn);
2165 else
2166 dump_insn_slim (outf, insn);
2167 if (flags & TDF_DETAILS)
2168 df_dump_insn_bottom (insn, outf);
2169 }
2170 }
2171
2172 if (df && (flags & TDF_DETAILS))
2173 {
2174 df_dump_bottom (bb, outf);
2175 putc ('\n', outf);
2176 }
2177
2178 }
2179 \f
2180 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2181 for the start of each basic block. FLAGS are the TDF_* masks documented
2182 in dumpfile.h. */
2183
2184 void
2185 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2186 {
2187 const rtx_insn *tmp_rtx;
2188 if (rtx_first == 0)
2189 fprintf (outf, "(nil)\n");
2190 else
2191 {
2192 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2193 int max_uid = get_max_uid ();
2194 basic_block *start = XCNEWVEC (basic_block, max_uid);
2195 basic_block *end = XCNEWVEC (basic_block, max_uid);
2196 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2197 basic_block bb;
2198
2199 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2200 insns, but the CFG is not maintained so the basic block info
2201 is not reliable. Therefore it's omitted from the dumps. */
2202 if (! (cfun->curr_properties & PROP_cfg))
2203 flags &= ~TDF_BLOCKS;
2204
2205 if (df)
2206 df_dump_start (outf);
2207
2208 if (cfun->curr_properties & PROP_cfg)
2209 {
2210 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2211 {
2212 rtx_insn *x;
2213
2214 start[INSN_UID (BB_HEAD (bb))] = bb;
2215 end[INSN_UID (BB_END (bb))] = bb;
2216 if (flags & TDF_BLOCKS)
2217 {
2218 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2219 {
2220 enum bb_state state = IN_MULTIPLE_BB;
2221
2222 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2223 state = IN_ONE_BB;
2224 in_bb_p[INSN_UID (x)] = state;
2225
2226 if (x == BB_END (bb))
2227 break;
2228 }
2229 }
2230 }
2231 }
2232
2233 for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2234 {
2235 if (flags & TDF_BLOCKS)
2236 {
2237 bb = start[INSN_UID (tmp_rtx)];
2238 if (bb != NULL)
2239 {
2240 dump_bb_info (outf, bb, 0, dump_flags, true, false);
2241 if (df && (flags & TDF_DETAILS))
2242 df_dump_top (bb, outf);
2243 }
2244
2245 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2246 && !NOTE_P (tmp_rtx)
2247 && !BARRIER_P (tmp_rtx))
2248 fprintf (outf, ";; Insn is not within a basic block\n");
2249 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2250 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2251 }
2252
2253 if (flags & TDF_DETAILS)
2254 df_dump_insn_top (tmp_rtx, outf);
2255 if (! (flags & TDF_SLIM))
2256 print_rtl_single (outf, tmp_rtx);
2257 else
2258 dump_insn_slim (outf, tmp_rtx);
2259 if (flags & TDF_DETAILS)
2260 df_dump_insn_bottom (tmp_rtx, outf);
2261
2262 bb = end[INSN_UID (tmp_rtx)];
2263 if (bb != NULL)
2264 {
2265 if (flags & TDF_BLOCKS)
2266 {
2267 dump_bb_info (outf, bb, 0, dump_flags, false, true);
2268 if (df && (flags & TDF_DETAILS))
2269 df_dump_bottom (bb, outf);
2270 putc ('\n', outf);
2271 }
2272 /* Emit a hint if the fallthrough target of current basic block
2273 isn't the one placed right next. */
2274 else if (EDGE_COUNT (bb->succs) > 0)
2275 {
2276 gcc_assert (BB_END (bb) == tmp_rtx);
2277 const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2278 /* Bypass intervening deleted-insn notes and debug insns. */
2279 while (ninsn
2280 && !NONDEBUG_INSN_P (ninsn)
2281 && !start[INSN_UID (ninsn)])
2282 ninsn = NEXT_INSN (ninsn);
2283 edge e = find_fallthru_edge (bb->succs);
2284 if (e && ninsn)
2285 {
2286 basic_block dest = e->dest;
2287 if (start[INSN_UID (ninsn)] != dest)
2288 fprintf (outf, "%s ; pc falls through to BB %d\n",
2289 print_rtx_head, dest->index);
2290 }
2291 }
2292 }
2293 }
2294
2295 free (start);
2296 free (end);
2297 free (in_bb_p);
2298 }
2299 }
2300 \f
2301 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2302
2303 void
2304 update_br_prob_note (basic_block bb)
2305 {
2306 rtx note;
2307 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2308 if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2309 {
2310 if (note)
2311 {
2312 rtx *note_link, this_rtx;
2313
2314 note_link = &REG_NOTES (BB_END (bb));
2315 for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2316 if (this_rtx == note)
2317 {
2318 *note_link = XEXP (this_rtx, 1);
2319 break;
2320 }
2321 }
2322 return;
2323 }
2324 if (!note
2325 || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2326 return;
2327 XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2328 }
2329
2330 /* Get the last insn associated with block BB (that includes barriers and
2331 tablejumps after BB). */
2332 rtx_insn *
2333 get_last_bb_insn (basic_block bb)
2334 {
2335 rtx_jump_table_data *table;
2336 rtx_insn *tmp;
2337 rtx_insn *end = BB_END (bb);
2338
2339 /* Include any jump table following the basic block. */
2340 if (tablejump_p (end, NULL, &table))
2341 end = table;
2342
2343 /* Include any barriers that may follow the basic block. */
2344 tmp = next_nonnote_nondebug_insn_bb (end);
2345 while (tmp && BARRIER_P (tmp))
2346 {
2347 end = tmp;
2348 tmp = next_nonnote_nondebug_insn_bb (end);
2349 }
2350
2351 return end;
2352 }
2353
2354 /* Add all BBs reachable from entry via hot paths into the SET. */
2355
2356 void
2357 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2358 {
2359 auto_vec<basic_block, 64> worklist;
2360
2361 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2362 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2363
2364 while (worklist.length () > 0)
2365 {
2366 basic_block bb = worklist.pop ();
2367 edge_iterator ei;
2368 edge e;
2369
2370 FOR_EACH_EDGE (e, ei, bb->succs)
2371 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2372 && !set->add (e->dest))
2373 worklist.safe_push (e->dest);
2374 }
2375 }
2376
2377 /* Sanity check partition hotness to ensure that basic blocks in
2378   the cold partition don't dominate basic blocks in the hot partition.
2379 If FLAG_ONLY is true, report violations as errors. Otherwise
2380 re-mark the dominated blocks as cold, since this is run after
2381 cfg optimizations that may make hot blocks previously reached
2382 by both hot and cold blocks now only reachable along cold paths. */
2383
2384 static vec<basic_block>
2385 find_partition_fixes (bool flag_only)
2386 {
2387 basic_block bb;
2388 vec<basic_block> bbs_to_fix = vNULL;
2389 hash_set<basic_block> set;
2390
2391 /* Callers check this. */
2392 gcc_checking_assert (crtl->has_bb_partition);
2393
2394 find_bbs_reachable_by_hot_paths (&set);
2395
2396 FOR_EACH_BB_FN (bb, cfun)
2397 if (!set.contains (bb)
2398 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2399 {
2400 if (flag_only)
2401 error ("non-cold basic block %d reachable only "
2402 "by paths crossing the cold partition", bb->index);
2403 else
2404 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2405 bbs_to_fix.safe_push (bb);
2406 }
2407
2408 return bbs_to_fix;
2409 }
2410
2411 /* Perform cleanup on the hot/cold bb partitioning after optimization
2412 passes that modify the cfg. */
2413
2414 void
2415 fixup_partitions (void)
2416 {
2417 basic_block bb;
2418
2419 if (!crtl->has_bb_partition)
2420 return;
2421
2422 /* Delete any blocks that became unreachable and weren't
2423 already cleaned up, for example during edge forwarding
2424 and convert_jumps_to_returns. This will expose more
2425 opportunities for fixing the partition boundaries here.
2426 Also, the calculation of the dominance graph during verification
2427 will assert if there are unreachable nodes. */
2428 delete_unreachable_blocks ();
2429
2430 /* If there are partitions, do a sanity check on them: A basic block in
2431   a cold partition cannot dominate a basic block in a hot partition.
2432 Fixup any that now violate this requirement, as a result of edge
2433 forwarding and unreachable block deletion.  */
2434 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2435
2436 /* Do the partition fixup after all necessary blocks have been converted to
2437 cold, so that we only update the region crossings the minimum number of
2438 places, which can require forcing edges to be non fallthru. */
2439 while (! bbs_to_fix.is_empty ())
2440 {
2441 bb = bbs_to_fix.pop ();
2442 fixup_new_cold_bb (bb);
2443 }
2444 }
2445
2446 /* Verify, in the basic block chain, that there is at most one switch
2447 between hot/cold partitions. This condition will not be true until
2448 after reorder_basic_blocks is called. */
2449
2450 static int
2451 verify_hot_cold_block_grouping (void)
2452 {
2453 basic_block bb;
2454 int err = 0;
2455 bool switched_sections = false;
2456 int current_partition = BB_UNPARTITIONED;
2457
2458 /* Even after bb reordering is complete, we go into cfglayout mode
2459 again (in compgoto). Ensure we don't call this before going back
2460 into linearized RTL when any layout fixes would have been committed. */
2461 if (!crtl->bb_reorder_complete
2462 || current_ir_type () != IR_RTL_CFGRTL)
2463 return err;
2464
2465 FOR_EACH_BB_FN (bb, cfun)
2466 {
2467 if (current_partition != BB_UNPARTITIONED
2468 && BB_PARTITION (bb) != current_partition)
2469 {
2470 if (switched_sections)
2471 {
2472 error ("multiple hot/cold transitions found (bb %i)",
2473 bb->index);
2474 err = 1;
2475 }
2476 else
2477 switched_sections = true;
2478
2479 if (!crtl->has_bb_partition)
2480 error ("partition found but function partition flag not set");
2481 }
2482 current_partition = BB_PARTITION (bb);
2483 }
2484
2485 return err;
2486 }
2487 \f
2488
2489 /* Perform several checks on the edges out of each block, such as
2490 the consistency of the branch probabilities, the correctness
2491 of hot/cold partition crossing edges, and the number of expected
2492 successor edges. Also verify that the dominance relationship
2493 between hot/cold blocks is sane. */
2494
2495 static int
2496 rtl_verify_edges (void)
2497 {
2498 int err = 0;
2499 basic_block bb;
2500
2501 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2502 {
2503 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2504 int n_eh = 0, n_abnormal = 0;
2505 edge e, fallthru = NULL;
2506 edge_iterator ei;
2507 rtx note;
2508 bool has_crossing_edge = false;
2509
2510 if (JUMP_P (BB_END (bb))
2511 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2512 && EDGE_COUNT (bb->succs) >= 2
2513 && any_condjump_p (BB_END (bb)))
2514 {
2515 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2516 {
2517 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2518 {
2519 error ("verify_flow_info: "
2520 "REG_BR_PROB is set but cfg probability is not");
2521 err = 1;
2522 }
2523 }
2524 else if (XINT (note, 0)
2525 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2526 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2527 {
2528 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2529 XINT (note, 0),
2530 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2531 err = 1;
2532 }
2533 }
2534
2535 FOR_EACH_EDGE (e, ei, bb->succs)
2536 {
2537 bool is_crossing;
2538
2539 if (e->flags & EDGE_FALLTHRU)
2540 n_fallthru++, fallthru = e;
2541
2542 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2543 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2544 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2545 has_crossing_edge |= is_crossing;
2546 if (e->flags & EDGE_CROSSING)
2547 {
2548 if (!is_crossing)
2549 {
2550 error ("EDGE_CROSSING incorrectly set across same section");
2551 err = 1;
2552 }
2553 if (e->flags & EDGE_FALLTHRU)
2554 {
2555 error ("fallthru edge crosses section boundary in bb %i",
2556 e->src->index);
2557 err = 1;
2558 }
2559 if (e->flags & EDGE_EH)
2560 {
2561 error ("EH edge crosses section boundary in bb %i",
2562 e->src->index);
2563 err = 1;
2564 }
2565 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2566 {
2567 error ("No region crossing jump at section boundary in bb %i",
2568 bb->index);
2569 err = 1;
2570 }
2571 }
2572 else if (is_crossing)
2573 {
2574 error ("EDGE_CROSSING missing across section boundary");
2575 err = 1;
2576 }
2577
2578 if ((e->flags & ~(EDGE_DFS_BACK
2579 | EDGE_CAN_FALLTHRU
2580 | EDGE_IRREDUCIBLE_LOOP
2581 | EDGE_LOOP_EXIT
2582 | EDGE_CROSSING
2583 | EDGE_PRESERVE)) == 0)
2584 n_branch++;
2585
2586 if (e->flags & EDGE_ABNORMAL_CALL)
2587 n_abnormal_call++;
2588
2589 if (e->flags & EDGE_SIBCALL)
2590 n_sibcall++;
2591
2592 if (e->flags & EDGE_EH)
2593 n_eh++;
2594
2595 if (e->flags & EDGE_ABNORMAL)
2596 n_abnormal++;
2597 }
2598
2599 if (!has_crossing_edge
2600 && JUMP_P (BB_END (bb))
2601 && CROSSING_JUMP_P (BB_END (bb)))
2602 {
2603 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2604 error ("Region crossing jump across same section in bb %i",
2605 bb->index);
2606 err = 1;
2607 }
2608
2609 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2610 {
2611 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2612 err = 1;
2613 }
2614 if (n_eh > 1)
2615 {
2616 error ("too many exception handling edges in bb %i", bb->index);
2617 err = 1;
2618 }
2619 if (n_branch
2620 && (!JUMP_P (BB_END (bb))
2621 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2622 || any_condjump_p (BB_END (bb))))))
2623 {
2624 error ("too many outgoing branch edges from bb %i", bb->index);
2625 err = 1;
2626 }
2627 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2628 {
2629 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2630 err = 1;
2631 }
2632 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2633 {
2634 error ("wrong number of branch edges after unconditional jump"
2635 " in bb %i", bb->index);
2636 err = 1;
2637 }
2638 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2639 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2640 {
2641 error ("wrong amount of branch edges after conditional jump"
2642 " in bb %i", bb->index);
2643 err = 1;
2644 }
2645 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2646 {
2647 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2648 err = 1;
2649 }
2650 if (n_sibcall && !CALL_P (BB_END (bb)))
2651 {
2652 error ("sibcall edges for non-call insn in bb %i", bb->index);
2653 err = 1;
2654 }
2655 if (n_abnormal > n_eh
2656 && !(CALL_P (BB_END (bb))
2657 && n_abnormal == n_abnormal_call + n_sibcall)
2658 && (!JUMP_P (BB_END (bb))
2659 || any_condjump_p (BB_END (bb))
2660 || any_uncondjump_p (BB_END (bb))))
2661 {
2662 error ("abnormal edges for no purpose in bb %i", bb->index);
2663 err = 1;
2664 }
2665
2666 int has_eh = -1;
2667 FOR_EACH_EDGE (e, ei, bb->preds)
2668 {
2669 if (has_eh == -1)
2670 has_eh = (e->flags & EDGE_EH);
2671 if ((e->flags & EDGE_EH) == has_eh)
2672 continue;
2673 error ("EH incoming edge mixed with non-EH incoming edges "
2674 "in bb %i", bb->index);
2675 err = 1;
2676 break;
2677 }
2678 }
2679
2680 /* If there are partitions, do a sanity check on them: A basic block in
2681   a cold partition cannot dominate a basic block in a hot partition.  */
2682 if (crtl->has_bb_partition && !err
2683 && current_ir_type () == IR_RTL_CFGLAYOUT)
2684 {
2685 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2686 err = !bbs_to_fix.is_empty ();
2687 }
2688
2689 /* Clean up. */
2690 return err;
2691 }
2692
2693 /* Checks on the instructions within blocks. Currently checks that each
2694 block starts with a basic block note, and that basic block notes and
2695 control flow jumps are not found in the middle of the block. */
2696
2697 static int
2698 rtl_verify_bb_insns (void)
2699 {
2700 rtx_insn *x;
2701 int err = 0;
2702 basic_block bb;
2703
2704 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2705 {
2706 /* Now check the header of basic
2707 block. It ought to contain optional CODE_LABEL followed
2708 by NOTE_BASIC_BLOCK. */
2709 x = BB_HEAD (bb);
2710 if (LABEL_P (x))
2711 {
2712 if (BB_END (bb) == x)
2713 {
2714 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2715 bb->index);
2716 err = 1;
2717 }
2718
2719 x = NEXT_INSN (x);
2720 }
2721
2722 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2723 {
2724 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2725 bb->index);
2726 err = 1;
2727 }
2728
2729 if (BB_END (bb) == x)
2730 /* Do checks for empty blocks here. */
2731 ;
2732 else
2733 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2734 {
2735 if (NOTE_INSN_BASIC_BLOCK_P (x))
2736 {
2737 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2738 INSN_UID (x), bb->index);
2739 err = 1;
2740 }
2741
2742 if (x == BB_END (bb))
2743 break;
2744
2745 if (control_flow_insn_p (x))
2746 {
2747 error ("in basic block %d:", bb->index);
2748 fatal_insn ("flow control insn inside a basic block", x);
2749 }
2750 }
2751 }
2752
2753 /* Clean up. */
2754 return err;
2755 }
2756
2757 /* Verify that block pointers for instructions in basic blocks, headers and
2758 footers are set appropriately. */
2759
2760 static int
2761 rtl_verify_bb_pointers (void)
2762 {
2763 int err = 0;
2764 basic_block bb;
2765
2766 /* Check the general integrity of the basic blocks. */
2767 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2768 {
2769 rtx_insn *insn;
2770
2771 if (!(bb->flags & BB_RTL))
2772 {
2773 error ("BB_RTL flag not set for block %d", bb->index);
2774 err = 1;
2775 }
2776
2777 FOR_BB_INSNS (bb, insn)
2778 if (BLOCK_FOR_INSN (insn) != bb)
2779 {
2780 error ("insn %d basic block pointer is %d, should be %d",
2781 INSN_UID (insn),
2782 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2783 bb->index);
2784 err = 1;
2785 }
2786
2787 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2788 if (!BARRIER_P (insn)
2789 && BLOCK_FOR_INSN (insn) != NULL)
2790 {
2791 error ("insn %d in header of bb %d has non-NULL basic block",
2792 INSN_UID (insn), bb->index);
2793 err = 1;
2794 }
2795 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2796 if (!BARRIER_P (insn)
2797 && BLOCK_FOR_INSN (insn) != NULL)
2798 {
2799 error ("insn %d in footer of bb %d has non-NULL basic block",
2800 INSN_UID (insn), bb->index);
2801 err = 1;
2802 }
2803 }
2804
2805 /* Clean up. */
2806 return err;
2807 }
2808
2809 /* Verify the CFG and RTL consistency common for both underlying RTL and
2810 cfglayout RTL.
2811
2812 Currently it does following checks:
2813
2814 - overlapping of basic blocks
2815 - insns with wrong BLOCK_FOR_INSN pointers
2816 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2817 - tails of basic blocks (ensure that boundary is necessary)
2818 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2819 and NOTE_INSN_BASIC_BLOCK
2820 - verify that no fall_thru edge crosses hot/cold partition boundaries
2821 - verify that there are no pending RTL branch predictions
2822 - verify that hot blocks are not dominated by cold blocks
2823
2824 In future it can be extended check a lot of other stuff as well
2825 (reachability of basic blocks, life information, etc. etc.). */
2826
2827 static int
2828 rtl_verify_flow_info_1 (void)
2829 {
2830 int err = 0;
2831
2832 err |= rtl_verify_bb_pointers ();
2833
2834 err |= rtl_verify_bb_insns ();
2835
2836 err |= rtl_verify_edges ();
2837
2838 return err;
2839 }
2840
2841 /* Walk the instruction chain and verify that bb head/end pointers
2842 are correct, and that instructions are in exactly one bb and have
2843 correct block pointers. */
2844
2845 static int
2846 rtl_verify_bb_insn_chain (void)
2847 {
2848 basic_block bb;
2849 int err = 0;
2850 rtx_insn *x;
2851 rtx_insn *last_head = get_last_insn ();
2852 basic_block *bb_info;
2853 const int max_uid = get_max_uid ();
2854
2855 bb_info = XCNEWVEC (basic_block, max_uid);
2856
2857 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2858 {
2859 rtx_insn *head = BB_HEAD (bb);
2860 rtx_insn *end = BB_END (bb);
2861
2862 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2863 {
2864 /* Verify the end of the basic block is in the INSN chain. */
2865 if (x == end)
2866 break;
2867
2868 /* And that the code outside of basic blocks has NULL bb field. */
2869 if (!BARRIER_P (x)
2870 && BLOCK_FOR_INSN (x) != NULL)
2871 {
2872 error ("insn %d outside of basic blocks has non-NULL bb field",
2873 INSN_UID (x));
2874 err = 1;
2875 }
2876 }
2877
2878 if (!x)
2879 {
2880 error ("end insn %d for block %d not found in the insn stream",
2881 INSN_UID (end), bb->index);
2882 err = 1;
2883 }
2884
2885 /* Work backwards from the end to the head of the basic block
2886 to verify the head is in the RTL chain. */
2887 for (; x != NULL_RTX; x = PREV_INSN (x))
2888 {
2889 /* While walking over the insn chain, verify insns appear
2890 in only one basic block. */
2891 if (bb_info[INSN_UID (x)] != NULL)
2892 {
2893 error ("insn %d is in multiple basic blocks (%d and %d)",
2894 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2895 err = 1;
2896 }
2897
2898 bb_info[INSN_UID (x)] = bb;
2899
2900 if (x == head)
2901 break;
2902 }
2903 if (!x)
2904 {
2905 error ("head insn %d for block %d not found in the insn stream",
2906 INSN_UID (head), bb->index);
2907 err = 1;
2908 }
2909
2910 last_head = PREV_INSN (x);
2911 }
2912
2913 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2914 {
2915 /* Check that the code before the first basic block has NULL
2916 bb field. */
2917 if (!BARRIER_P (x)
2918 && BLOCK_FOR_INSN (x) != NULL)
2919 {
2920 error ("insn %d outside of basic blocks has non-NULL bb field",
2921 INSN_UID (x));
2922 err = 1;
2923 }
2924 }
2925 free (bb_info);
2926
2927 return err;
2928 }
2929
2930 /* Verify that fallthru edges point to adjacent blocks in layout order and
2931 that barriers exist after non-fallthru blocks. */
2932
2933 static int
2934 rtl_verify_fallthru (void)
2935 {
2936 basic_block bb;
2937 int err = 0;
2938
2939 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2940 {
2941 edge e;
2942
2943 e = find_fallthru_edge (bb->succs);
2944 if (!e)
2945 {
2946 rtx_insn *insn;
2947
2948 /* Ensure existence of barrier in BB with no fallthru edges. */
2949 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2950 {
2951 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2952 {
2953 error ("missing barrier after block %i", bb->index);
2954 err = 1;
2955 break;
2956 }
2957 if (BARRIER_P (insn))
2958 break;
2959 }
2960 }
2961 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2962 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2963 {
2964 rtx_insn *insn;
2965
2966 if (e->src->next_bb != e->dest)
2967 {
2968 error
2969 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2970 e->src->index, e->dest->index);
2971 err = 1;
2972 }
2973 else
2974 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2975 insn = NEXT_INSN (insn))
2976 if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
2977 {
2978 error ("verify_flow_info: Incorrect fallthru %i->%i",
2979 e->src->index, e->dest->index);
2980 fatal_insn ("wrong insn in the fallthru edge", insn);
2981 err = 1;
2982 }
2983 }
2984 }
2985
2986 return err;
2987 }
2988
2989 /* Verify that blocks are laid out in consecutive order. While walking the
2990 instructions, verify that all expected instructions are inside the basic
2991 blocks, and that all returns are followed by barriers. */
2992
2993 static int
2994 rtl_verify_bb_layout (void)
2995 {
2996 basic_block bb;
2997 int err = 0;
2998 rtx_insn *x, *y;
2999 int num_bb_notes;
3000 rtx_insn * const rtx_first = get_insns ();
3001 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
3002
3003 num_bb_notes = 0;
3004
3005 for (x = rtx_first; x; x = NEXT_INSN (x))
3006 {
3007 if (NOTE_INSN_BASIC_BLOCK_P (x))
3008 {
3009 bb = NOTE_BASIC_BLOCK (x);
3010
3011 num_bb_notes++;
3012 if (bb != last_bb_seen->next_bb)
3013 internal_error ("basic blocks not laid down consecutively");
3014
3015 curr_bb = last_bb_seen = bb;
3016 }
3017
3018 if (!curr_bb)
3019 {
3020 switch (GET_CODE (x))
3021 {
3022 case BARRIER:
3023 case NOTE:
3024 break;
3025
3026 case CODE_LABEL:
3027 /* An ADDR_VEC is placed outside any basic block. */
3028 if (NEXT_INSN (x)
3029 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3030 x = NEXT_INSN (x);
3031
3032 /* But in any case, non-deletable labels can appear anywhere. */
3033 break;
3034
3035 default:
3036 fatal_insn ("insn outside basic block", x);
3037 }
3038 }
3039
3040 if (JUMP_P (x)
3041 && returnjump_p (x) && ! condjump_p (x)
3042 && ! ((y = next_nonnote_nondebug_insn (x))
3043 && BARRIER_P (y)))
3044 fatal_insn ("return not followed by barrier", x);
3045
3046 if (curr_bb && x == BB_END (curr_bb))
3047 curr_bb = NULL;
3048 }
3049
3050 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3051 internal_error
3052 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3053 num_bb_notes, n_basic_blocks_for_fn (cfun));
3054
3055 return err;
3056 }
3057
3058 /* Verify the CFG and RTL consistency common for both underlying RTL and
3059 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3060
3061 Currently it does following checks:
3062 - all checks of rtl_verify_flow_info_1
3063 - test head/end pointers
3064 - check that blocks are laid out in consecutive order
3065 - check that all insns are in the basic blocks
3066 (except the switch handling code, barriers and notes)
3067 - check that all returns are followed by barriers
3068 - check that all fallthru edge points to the adjacent blocks
3069 - verify that there is a single hot/cold partition boundary after bbro */
3070
3071 static int
3072 rtl_verify_flow_info (void)
3073 {
3074 int err = 0;
3075
3076 err |= rtl_verify_flow_info_1 ();
3077
3078 err |= rtl_verify_bb_insn_chain ();
3079
3080 err |= rtl_verify_fallthru ();
3081
3082 err |= rtl_verify_bb_layout ();
3083
3084 err |= verify_hot_cold_block_grouping ();
3085
3086 return err;
3087 }
3088 \f
3089 /* Assume that the preceding pass has possibly eliminated jump instructions
3090 or converted the unconditional jumps. Eliminate the edges from CFG.
3091 Return true if any edges are eliminated. */
3092
3093 bool
3094 purge_dead_edges (basic_block bb)
3095 {
3096 edge e;
3097 rtx_insn *insn = BB_END (bb);
3098 rtx note;
3099 bool purged = false;
3100 bool found;
3101 edge_iterator ei;
3102
3103 if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
3104 do
3105 insn = PREV_INSN (insn);
3106 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3107
3108 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3109 if (NONJUMP_INSN_P (insn)
3110 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3111 {
3112 rtx eqnote;
3113
3114 if (! may_trap_p (PATTERN (insn))
3115 || ((eqnote = find_reg_equal_equiv_note (insn))
3116 && ! may_trap_p (XEXP (eqnote, 0))))
3117 remove_note (insn, note);
3118 }
3119
3120 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3121 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3122 {
3123 bool remove = false;
3124
3125 /* There are three types of edges we need to handle correctly here: EH
3126 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3127 latter can appear when nonlocal gotos are used. */
3128 if (e->flags & EDGE_ABNORMAL_CALL)
3129 {
3130 if (!CALL_P (insn))
3131 remove = true;
3132 else if (can_nonlocal_goto (insn))
3133 ;
3134 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3135 ;
3136 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3137 ;
3138 else
3139 remove = true;
3140 }
3141 else if (e->flags & EDGE_EH)
3142 remove = !can_throw_internal (insn);
3143
3144 if (remove)
3145 {
3146 remove_edge (e);
3147 df_set_bb_dirty (bb);
3148 purged = true;
3149 }
3150 else
3151 ei_next (&ei);
3152 }
3153
3154 if (JUMP_P (insn))
3155 {
3156 rtx note;
3157 edge b,f;
3158 edge_iterator ei;
3159
3160 /* We do care only about conditional jumps and simplejumps. */
3161 if (!any_condjump_p (insn)
3162 && !returnjump_p (insn)
3163 && !simplejump_p (insn))
3164 return purged;
3165
3166 /* Branch probability/prediction notes are defined only for
3167 condjumps. We've possibly turned condjump into simplejump. */
3168 if (simplejump_p (insn))
3169 {
3170 note = find_reg_note (insn, REG_BR_PROB, NULL);
3171 if (note)
3172 remove_note (insn, note);
3173 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3174 remove_note (insn, note);
3175 }
3176
3177 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3178 {
3179 /* Avoid abnormal flags to leak from computed jumps turned
3180 into simplejumps. */
3181
3182 e->flags &= ~EDGE_ABNORMAL;
3183
3184 /* See if this edge is one we should keep. */
3185 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3186 /* A conditional jump can fall through into the next
3187 block, so we should keep the edge. */
3188 {
3189 ei_next (&ei);
3190 continue;
3191 }
3192 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3193 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3194 /* If the destination block is the target of the jump,
3195 keep the edge. */
3196 {
3197 ei_next (&ei);
3198 continue;
3199 }
3200 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3201 && returnjump_p (insn))
3202 /* If the destination block is the exit block, and this
3203 instruction is a return, then keep the edge. */
3204 {
3205 ei_next (&ei);
3206 continue;
3207 }
3208 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3209 /* Keep the edges that correspond to exceptions thrown by
3210 this instruction and rematerialize the EDGE_ABNORMAL
3211 flag we just cleared above. */
3212 {
3213 e->flags |= EDGE_ABNORMAL;
3214 ei_next (&ei);
3215 continue;
3216 }
3217
3218 /* We do not need this edge. */
3219 df_set_bb_dirty (bb);
3220 purged = true;
3221 remove_edge (e);
3222 }
3223
3224 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3225 return purged;
3226
3227 if (dump_file)
3228 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3229
3230 if (!optimize)
3231 return purged;
3232
3233 /* Redistribute probabilities. */
3234 if (single_succ_p (bb))
3235 {
3236 single_succ_edge (bb)->probability = profile_probability::always ();
3237 }
3238 else
3239 {
3240 note = find_reg_note (insn, REG_BR_PROB, NULL);
3241 if (!note)
3242 return purged;
3243
3244 b = BRANCH_EDGE (bb);
3245 f = FALLTHRU_EDGE (bb);
3246 b->probability = profile_probability::from_reg_br_prob_note
3247 (XINT (note, 0));
3248 f->probability = b->probability.invert ();
3249 }
3250
3251 return purged;
3252 }
3253 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3254 {
3255 /* First, there should not be any EH or ABCALL edges resulting
3256 from non-local gotos and the like. If there were, we shouldn't
3257 have created the sibcall in the first place. Second, there
3258 should of course never have been a fallthru edge. */
3259 gcc_assert (single_succ_p (bb));
3260 gcc_assert (single_succ_edge (bb)->flags
3261 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3262
3263 return 0;
3264 }
3265
3266 /* If we don't see a jump insn, we don't know exactly why the block would
3267 have been broken at this point. Look for a simple, non-fallthru edge,
3268 as these are only created by conditional branches. If we find such an
3269 edge we know that there used to be a jump here and can then safely
3270 remove all non-fallthru edges. */
3271 found = false;
3272 FOR_EACH_EDGE (e, ei, bb->succs)
3273 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3274 {
3275 found = true;
3276 break;
3277 }
3278
3279 if (!found)
3280 return purged;
3281
3282 /* Remove all but the fake and fallthru edges. The fake edge may be
3283 the only successor for this block in the case of noreturn
3284 calls. */
3285 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3286 {
3287 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3288 {
3289 df_set_bb_dirty (bb);
3290 remove_edge (e);
3291 purged = true;
3292 }
3293 else
3294 ei_next (&ei);
3295 }
3296
3297 gcc_assert (single_succ_p (bb));
3298
3299 single_succ_edge (bb)->probability = profile_probability::always ();
3300
3301 if (dump_file)
3302 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3303 bb->index);
3304 return purged;
3305 }
3306
3307 /* Search all basic blocks for potentially dead edges and purge them. Return
3308 true if some edge has been eliminated. */
3309
3310 bool
3311 purge_all_dead_edges (void)
3312 {
3313 int purged = false;
3314 basic_block bb;
3315
3316 FOR_EACH_BB_FN (bb, cfun)
3317 {
3318 bool purged_here = purge_dead_edges (bb);
3319
3320 purged |= purged_here;
3321 }
3322
3323 return purged;
3324 }
3325
3326 /* This is used by a few passes that emit some instructions after abnormal
3327 calls, moving the basic block's end, while they in fact do want to emit
3328 them on the fallthru edge. Look for abnormal call edges, find backward
3329 the call in the block and insert the instructions on the edge instead.
3330
3331 Similarly, handle instructions throwing exceptions internally.
3332
3333 Return true when instructions have been found and inserted on edges. */
3334
3335 bool
3336 fixup_abnormal_edges (void)
3337 {
3338 bool inserted = false;
3339 basic_block bb;
3340
3341 FOR_EACH_BB_FN (bb, cfun)
3342 {
3343 edge e;
3344 edge_iterator ei;
3345
3346 /* Look for cases we are interested in - calls or instructions causing
3347 exceptions. */
3348 FOR_EACH_EDGE (e, ei, bb->succs)
3349 if ((e->flags & EDGE_ABNORMAL_CALL)
3350 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3351 == (EDGE_ABNORMAL | EDGE_EH)))
3352 break;
3353
3354 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3355 {
3356 rtx_insn *insn;
3357
3358 /* Get past the new insns generated. Allow notes, as the insns
3359 may be already deleted. */
3360 insn = BB_END (bb);
3361 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3362 && !can_throw_internal (insn)
3363 && insn != BB_HEAD (bb))
3364 insn = PREV_INSN (insn);
3365
3366 if (CALL_P (insn) || can_throw_internal (insn))
3367 {
3368 rtx_insn *stop, *next;
3369
3370 e = find_fallthru_edge (bb->succs);
3371
3372 stop = NEXT_INSN (BB_END (bb));
3373 BB_END (bb) = insn;
3374
3375 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3376 {
3377 next = NEXT_INSN (insn);
3378 if (INSN_P (insn))
3379 {
3380 delete_insn (insn);
3381
3382 /* Sometimes there's still the return value USE.
3383 If it's placed after a trapping call (i.e. that
3384 call is the last insn anyway), we have no fallthru
3385 edge. Simply delete this use and don't try to insert
3386 on the non-existent edge.
3387 Similarly, sometimes a call that can throw is
3388 followed in the source with __builtin_unreachable (),
3389 meaning that there is UB if the call returns rather
3390 than throws. If there weren't any instructions
3391 following such calls before, supposedly even the ones
3392 we've deleted aren't significant and can be
3393 removed. */
3394 if (e)
3395 {
3396 /* We're not deleting it, we're moving it. */
3397 insn->set_undeleted ();
3398 SET_PREV_INSN (insn) = NULL_RTX;
3399 SET_NEXT_INSN (insn) = NULL_RTX;
3400
3401 insert_insn_on_edge (insn, e);
3402 inserted = true;
3403 }
3404 }
3405 else if (!BARRIER_P (insn))
3406 set_block_for_insn (insn, NULL);
3407 }
3408 }
3409
3410 /* It may be that we don't find any trapping insn. In this
3411 case we discovered quite late that the insn that had been
3412 marked as can_throw_internal in fact couldn't trap at all.
3413 So we should in fact delete the EH edges out of the block. */
3414 else
3415 purge_dead_edges (bb);
3416 }
3417 }
3418
3419 return inserted;
3420 }
3421 \f
3422 /* Cut the insns from FIRST to LAST out of the insns stream. */
3423
3424 rtx_insn *
3425 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3426 {
3427 rtx_insn *prevfirst = PREV_INSN (first);
3428 rtx_insn *nextlast = NEXT_INSN (last);
3429
3430 SET_PREV_INSN (first) = NULL;
3431 SET_NEXT_INSN (last) = NULL;
3432 if (prevfirst)
3433 SET_NEXT_INSN (prevfirst) = nextlast;
3434 if (nextlast)
3435 SET_PREV_INSN (nextlast) = prevfirst;
3436 else
3437 set_last_insn (prevfirst);
3438 if (!prevfirst)
3439 set_first_insn (nextlast);
3440 return first;
3441 }
3442 \f
3443 /* Skip over inter-block insns occurring after BB which are typically
3444 associated with BB (e.g., barriers). If there are any such insns,
3445 we return the last one. Otherwise, we return the end of BB. */
3446
3447 static rtx_insn *
3448 skip_insns_after_block (basic_block bb)
3449 {
3450 rtx_insn *insn, *last_insn, *next_head, *prev;
3451
3452 next_head = NULL;
3453 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3454 next_head = BB_HEAD (bb->next_bb);
3455
3456 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3457 {
3458 if (insn == next_head)
3459 break;
3460
3461 switch (GET_CODE (insn))
3462 {
3463 case BARRIER:
3464 last_insn = insn;
3465 continue;
3466
3467 case NOTE:
3468 switch (NOTE_KIND (insn))
3469 {
3470 case NOTE_INSN_BLOCK_END:
3471 gcc_unreachable ();
3472 continue;
3473 default:
3474 continue;
3475 break;
3476 }
3477 break;
3478
3479 case CODE_LABEL:
3480 if (NEXT_INSN (insn)
3481 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3482 {
3483 insn = NEXT_INSN (insn);
3484 last_insn = insn;
3485 continue;
3486 }
3487 break;
3488
3489 default:
3490 break;
3491 }
3492
3493 break;
3494 }
3495
3496 /* It is possible to hit contradictory sequence. For instance:
3497
3498 jump_insn
3499 NOTE_INSN_BLOCK_BEG
3500 barrier
3501
3502 Where barrier belongs to jump_insn, but the note does not. This can be
3503 created by removing the basic block originally following
3504 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3505
3506 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3507 {
3508 prev = PREV_INSN (insn);
3509 if (NOTE_P (insn))
3510 switch (NOTE_KIND (insn))
3511 {
3512 case NOTE_INSN_BLOCK_END:
3513 gcc_unreachable ();
3514 break;
3515 case NOTE_INSN_DELETED:
3516 case NOTE_INSN_DELETED_LABEL:
3517 case NOTE_INSN_DELETED_DEBUG_LABEL:
3518 continue;
3519 default:
3520 reorder_insns (insn, insn, last_insn);
3521 }
3522 }
3523
3524 return last_insn;
3525 }
3526
3527 /* Locate or create a label for a given basic block. */
3528
3529 static rtx_insn *
3530 label_for_bb (basic_block bb)
3531 {
3532 rtx_insn *label = BB_HEAD (bb);
3533
3534 if (!LABEL_P (label))
3535 {
3536 if (dump_file)
3537 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3538
3539 label = block_label (bb);
3540 }
3541
3542 return label;
3543 }
3544
3545 /* Locate the effective beginning and end of the insn chain for each
3546 block, as defined by skip_insns_after_block above. */
3547
3548 static void
3549 record_effective_endpoints (void)
3550 {
3551 rtx_insn *next_insn;
3552 basic_block bb;
3553 rtx_insn *insn;
3554
3555 for (insn = get_insns ();
3556 insn
3557 && NOTE_P (insn)
3558 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3559 insn = NEXT_INSN (insn))
3560 continue;
3561 /* No basic blocks at all? */
3562 gcc_assert (insn);
3563
3564 if (PREV_INSN (insn))
3565 cfg_layout_function_header =
3566 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3567 else
3568 cfg_layout_function_header = NULL;
3569
3570 next_insn = get_insns ();
3571 FOR_EACH_BB_FN (bb, cfun)
3572 {
3573 rtx_insn *end;
3574
3575 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3576 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3577 PREV_INSN (BB_HEAD (bb)));
3578 end = skip_insns_after_block (bb);
3579 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3580 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3581 next_insn = NEXT_INSN (BB_END (bb));
3582 }
3583
3584 cfg_layout_function_footer = next_insn;
3585 if (cfg_layout_function_footer)
3586 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3587 }
3588 \f
3589 namespace {
3590
3591 const pass_data pass_data_into_cfg_layout_mode =
3592 {
3593 RTL_PASS, /* type */
3594 "into_cfglayout", /* name */
3595 OPTGROUP_NONE, /* optinfo_flags */
3596 TV_CFG, /* tv_id */
3597 0, /* properties_required */
3598 PROP_cfglayout, /* properties_provided */
3599 0, /* properties_destroyed */
3600 0, /* todo_flags_start */
3601 0, /* todo_flags_finish */
3602 };
3603
3604 class pass_into_cfg_layout_mode : public rtl_opt_pass
3605 {
3606 public:
3607 pass_into_cfg_layout_mode (gcc::context *ctxt)
3608 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3609 {}
3610
3611 /* opt_pass methods: */
3612 virtual unsigned int execute (function *)
3613 {
3614 cfg_layout_initialize (0);
3615 return 0;
3616 }
3617
3618 }; // class pass_into_cfg_layout_mode
3619
3620 } // anon namespace
3621
3622 rtl_opt_pass *
3623 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3624 {
3625 return new pass_into_cfg_layout_mode (ctxt);
3626 }
3627
3628 namespace {
3629
3630 const pass_data pass_data_outof_cfg_layout_mode =
3631 {
3632 RTL_PASS, /* type */
3633 "outof_cfglayout", /* name */
3634 OPTGROUP_NONE, /* optinfo_flags */
3635 TV_CFG, /* tv_id */
3636 0, /* properties_required */
3637 0, /* properties_provided */
3638 PROP_cfglayout, /* properties_destroyed */
3639 0, /* todo_flags_start */
3640 0, /* todo_flags_finish */
3641 };
3642
3643 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3644 {
3645 public:
3646 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3647 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3648 {}
3649
3650 /* opt_pass methods: */
3651 virtual unsigned int execute (function *);
3652
3653 }; // class pass_outof_cfg_layout_mode
3654
3655 unsigned int
3656 pass_outof_cfg_layout_mode::execute (function *fun)
3657 {
3658 basic_block bb;
3659
3660 FOR_EACH_BB_FN (bb, fun)
3661 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3662 bb->aux = bb->next_bb;
3663
3664 cfg_layout_finalize ();
3665
3666 return 0;
3667 }
3668
3669 } // anon namespace
3670
3671 rtl_opt_pass *
3672 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3673 {
3674 return new pass_outof_cfg_layout_mode (ctxt);
3675 }
3676 \f
3677
3678 /* Link the basic blocks in the correct order, compacting the basic
3679 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3680 function also clears the basic block header and footer fields.
3681
3682 This function is usually called after a pass (e.g. tracer) finishes
3683 some transformations while in cfglayout mode. The required sequence
3684 of the basic blocks is in a linked list along the bb->aux field.
3685 This functions re-links the basic block prev_bb and next_bb pointers
3686 accordingly, and it compacts and renumbers the blocks.
3687
3688 FIXME: This currently works only for RTL, but the only RTL-specific
3689 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3690 to GIMPLE a long time ago, but it doesn't relink the basic block
3691 chain. It could do that (to give better initial RTL) if this function
3692 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3693
3694 void
3695 relink_block_chain (bool stay_in_cfglayout_mode)
3696 {
3697 basic_block bb, prev_bb;
3698 int index;
3699
3700 /* Maybe dump the re-ordered sequence. */
3701 if (dump_file)
3702 {
3703 fprintf (dump_file, "Reordered sequence:\n");
3704 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3705 NUM_FIXED_BLOCKS;
3706 bb;
3707 bb = (basic_block) bb->aux, index++)
3708 {
3709 fprintf (dump_file, " %i ", index);
3710 if (get_bb_original (bb))
3711 fprintf (dump_file, "duplicate of %i\n",
3712 get_bb_original (bb)->index);
3713 else if (forwarder_block_p (bb)
3714 && !LABEL_P (BB_HEAD (bb)))
3715 fprintf (dump_file, "compensation\n");
3716 else
3717 fprintf (dump_file, "bb %i\n", bb->index);
3718 }
3719 }
3720
3721 /* Now reorder the blocks. */
3722 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3723 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3724 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3725 {
3726 bb->prev_bb = prev_bb;
3727 prev_bb->next_bb = bb;
3728 }
3729 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3730 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3731
3732 /* Then, clean up the aux fields. */
3733 FOR_ALL_BB_FN (bb, cfun)
3734 {
3735 bb->aux = NULL;
3736 if (!stay_in_cfglayout_mode)
3737 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3738 }
3739
3740 /* Maybe reset the original copy tables, they are not valid anymore
3741 when we renumber the basic blocks in compact_blocks. If we are
3742 are going out of cfglayout mode, don't re-allocate the tables. */
3743 if (original_copy_tables_initialized_p ())
3744 free_original_copy_tables ();
3745 if (stay_in_cfglayout_mode)
3746 initialize_original_copy_tables ();
3747
3748 /* Finally, put basic_block_info in the new order. */
3749 compact_blocks ();
3750 }
3751 \f
3752
3753 /* Given a reorder chain, rearrange the code to match. */
3754
3755 static void
3756 fixup_reorder_chain (void)
3757 {
3758 basic_block bb;
3759 rtx_insn *insn = NULL;
3760
3761 if (cfg_layout_function_header)
3762 {
3763 set_first_insn (cfg_layout_function_header);
3764 insn = cfg_layout_function_header;
3765 while (NEXT_INSN (insn))
3766 insn = NEXT_INSN (insn);
3767 }
3768
3769 /* First do the bulk reordering -- rechain the blocks without regard to
3770 the needed changes to jumps and labels. */
3771
3772 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3773 bb->aux)
3774 {
3775 if (BB_HEADER (bb))
3776 {
3777 if (insn)
3778 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3779 else
3780 set_first_insn (BB_HEADER (bb));
3781 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3782 insn = BB_HEADER (bb);
3783 while (NEXT_INSN (insn))
3784 insn = NEXT_INSN (insn);
3785 }
3786 if (insn)
3787 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3788 else
3789 set_first_insn (BB_HEAD (bb));
3790 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3791 insn = BB_END (bb);
3792 if (BB_FOOTER (bb))
3793 {
3794 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3795 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3796 while (NEXT_INSN (insn))
3797 insn = NEXT_INSN (insn);
3798 }
3799 }
3800
3801 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3802 if (cfg_layout_function_footer)
3803 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3804
3805 while (NEXT_INSN (insn))
3806 insn = NEXT_INSN (insn);
3807
3808 set_last_insn (insn);
3809 if (flag_checking)
3810 verify_insn_chain ();
3811
3812 /* Now add jumps and labels as needed to match the blocks new
3813 outgoing edges. */
3814
3815 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3816 bb->aux)
3817 {
3818 edge e_fall, e_taken, e;
3819 rtx_insn *bb_end_insn;
3820 rtx ret_label = NULL_RTX;
3821 basic_block nb;
3822 edge_iterator ei;
3823
3824 if (EDGE_COUNT (bb->succs) == 0)
3825 continue;
3826
3827 /* Find the old fallthru edge, and another non-EH edge for
3828 a taken jump. */
3829 e_taken = e_fall = NULL;
3830
3831 FOR_EACH_EDGE (e, ei, bb->succs)
3832 if (e->flags & EDGE_FALLTHRU)
3833 e_fall = e;
3834 else if (! (e->flags & EDGE_EH))
3835 e_taken = e;
3836
3837 bb_end_insn = BB_END (bb);
3838 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3839 {
3840 ret_label = JUMP_LABEL (bb_end_jump);
3841 if (any_condjump_p (bb_end_jump))
3842 {
3843 /* This might happen if the conditional jump has side
3844 effects and could therefore not be optimized away.
3845 Make the basic block to end with a barrier in order
3846 to prevent rtl_verify_flow_info from complaining. */
3847 if (!e_fall)
3848 {
3849 gcc_assert (!onlyjump_p (bb_end_jump)
3850 || returnjump_p (bb_end_jump)
3851 || (e_taken->flags & EDGE_CROSSING));
3852 emit_barrier_after (bb_end_jump);
3853 continue;
3854 }
3855
3856 /* If the old fallthru is still next, nothing to do. */
3857 if (bb->aux == e_fall->dest
3858 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3859 continue;
3860
3861 /* The degenerated case of conditional jump jumping to the next
3862 instruction can happen for jumps with side effects. We need
3863 to construct a forwarder block and this will be done just
3864 fine by force_nonfallthru below. */
3865 if (!e_taken)
3866 ;
3867
3868 /* There is another special case: if *neither* block is next,
3869 such as happens at the very end of a function, then we'll
3870 need to add a new unconditional jump. Choose the taken
3871 edge based on known or assumed probability. */
3872 else if (bb->aux != e_taken->dest)
3873 {
3874 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3875
3876 if (note
3877 && profile_probability::from_reg_br_prob_note
3878 (XINT (note, 0)) < profile_probability::even ()
3879 && invert_jump (bb_end_jump,
3880 (e_fall->dest
3881 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3882 ? NULL_RTX
3883 : label_for_bb (e_fall->dest)), 0))
3884 {
3885 e_fall->flags &= ~EDGE_FALLTHRU;
3886 gcc_checking_assert (could_fall_through
3887 (e_taken->src, e_taken->dest));
3888 e_taken->flags |= EDGE_FALLTHRU;
3889 update_br_prob_note (bb);
3890 e = e_fall, e_fall = e_taken, e_taken = e;
3891 }
3892 }
3893
3894 /* If the "jumping" edge is a crossing edge, and the fall
3895 through edge is non-crossing, leave things as they are. */
3896 else if ((e_taken->flags & EDGE_CROSSING)
3897 && !(e_fall->flags & EDGE_CROSSING))
3898 continue;
3899
3900 /* Otherwise we can try to invert the jump. This will
3901 basically never fail, however, keep up the pretense. */
3902 else if (invert_jump (bb_end_jump,
3903 (e_fall->dest
3904 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3905 ? NULL_RTX
3906 : label_for_bb (e_fall->dest)), 0))
3907 {
3908 e_fall->flags &= ~EDGE_FALLTHRU;
3909 gcc_checking_assert (could_fall_through
3910 (e_taken->src, e_taken->dest));
3911 e_taken->flags |= EDGE_FALLTHRU;
3912 update_br_prob_note (bb);
3913 if (LABEL_NUSES (ret_label) == 0
3914 && single_pred_p (e_taken->dest))
3915 delete_insn (as_a<rtx_insn *> (ret_label));
3916 continue;
3917 }
3918 }
3919 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3920 {
3921 /* If the old fallthru is still next or if
3922 asm goto doesn't have a fallthru (e.g. when followed by
3923 __builtin_unreachable ()), nothing to do. */
3924 if (! e_fall
3925 || bb->aux == e_fall->dest
3926 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3927 continue;
3928
3929 /* Otherwise we'll have to use the fallthru fixup below. */
3930 }
3931 else
3932 {
3933 /* Otherwise we have some return, switch or computed
3934 jump. In the 99% case, there should not have been a
3935 fallthru edge. */
3936 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3937 continue;
3938 }
3939 }
3940 else
3941 {
3942 /* No fallthru implies a noreturn function with EH edges, or
3943 something similarly bizarre. In any case, we don't need to
3944 do anything. */
3945 if (! e_fall)
3946 continue;
3947
3948 /* If the fallthru block is still next, nothing to do. */
3949 if (bb->aux == e_fall->dest)
3950 continue;
3951
3952 /* A fallthru to exit block. */
3953 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3954 continue;
3955 }
3956
3957 /* We got here if we need to add a new jump insn.
3958 Note force_nonfallthru can delete E_FALL and thus we have to
3959 save E_FALL->src prior to the call to force_nonfallthru. */
3960 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3961 if (nb)
3962 {
3963 nb->aux = bb->aux;
3964 bb->aux = nb;
3965 /* Don't process this new block. */
3966 bb = nb;
3967 }
3968 }
3969
3970 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3971
3972 /* Annoying special case - jump around dead jumptables left in the code. */
3973 FOR_EACH_BB_FN (bb, cfun)
3974 {
3975 edge e = find_fallthru_edge (bb->succs);
3976
3977 if (e && !can_fallthru (e->src, e->dest))
3978 force_nonfallthru (e);
3979 }
3980
3981 /* Ensure goto_locus from edges has some instructions with that locus in RTL
3982 when not optimizing. */
3983 if (!optimize && !DECL_IGNORED_P (current_function_decl))
3984 FOR_EACH_BB_FN (bb, cfun)
3985 {
3986 edge e;
3987 edge_iterator ei;
3988
3989 FOR_EACH_EDGE (e, ei, bb->succs)
3990 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3991 && !(e->flags & EDGE_ABNORMAL))
3992 {
3993 edge e2;
3994 edge_iterator ei2;
3995 basic_block dest, nb;
3996 rtx_insn *end;
3997
3998 insn = BB_END (e->src);
3999 end = PREV_INSN (BB_HEAD (e->src));
4000 while (insn != end
4001 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4002 insn = PREV_INSN (insn);
4003 if (insn != end
4004 && INSN_LOCATION (insn) == e->goto_locus)
4005 continue;
4006 if (simplejump_p (BB_END (e->src))
4007 && !INSN_HAS_LOCATION (BB_END (e->src)))
4008 {
4009 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4010 continue;
4011 }
4012 dest = e->dest;
4013 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4014 {
4015 /* Non-fallthru edges to the exit block cannot be split. */
4016 if (!(e->flags & EDGE_FALLTHRU))
4017 continue;
4018 }
4019 else
4020 {
4021 insn = BB_HEAD (dest);
4022 end = NEXT_INSN (BB_END (dest));
4023 while (insn != end && !NONDEBUG_INSN_P (insn))
4024 insn = NEXT_INSN (insn);
4025 if (insn != end && INSN_HAS_LOCATION (insn)
4026 && INSN_LOCATION (insn) == e->goto_locus)
4027 continue;
4028 }
4029 nb = split_edge (e);
4030 if (!INSN_P (BB_END (nb)))
4031 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4032 nb);
4033 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4034
4035 /* If there are other incoming edges to the destination block
4036 with the same goto locus, redirect them to the new block as
4037 well, this can prevent other such blocks from being created
4038 in subsequent iterations of the loop. */
4039 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4040 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4041 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4042 && e->goto_locus == e2->goto_locus)
4043 redirect_edge_and_branch (e2, nb);
4044 else
4045 ei_next (&ei2);
4046 }
4047 }
4048 }
4049 \f
4050 /* Perform sanity checks on the insn chain.
4051 1. Check that next/prev pointers are consistent in both the forward and
4052 reverse direction.
4053 2. Count insns in chain, going both directions, and check if equal.
4054 3. Check that get_last_insn () returns the actual end of chain. */
4055
4056 DEBUG_FUNCTION void
4057 verify_insn_chain (void)
4058 {
4059 rtx_insn *x, *prevx, *nextx;
4060 int insn_cnt1, insn_cnt2;
4061
4062 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4063 x != 0;
4064 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4065 gcc_assert (PREV_INSN (x) == prevx);
4066
4067 gcc_assert (prevx == get_last_insn ());
4068
4069 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4070 x != 0;
4071 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4072 gcc_assert (NEXT_INSN (x) == nextx);
4073
4074 gcc_assert (insn_cnt1 == insn_cnt2);
4075 }
4076 \f
4077 /* If we have assembler epilogues, the block falling through to exit must
4078 be the last one in the reordered chain when we reach final. Ensure
4079 that this condition is met. */
4080 static void
4081 fixup_fallthru_exit_predecessor (void)
4082 {
4083 edge e;
4084 basic_block bb = NULL;
4085
4086 /* This transformation is not valid before reload, because we might
4087 separate a call from the instruction that copies the return
4088 value. */
4089 gcc_assert (reload_completed);
4090
4091 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4092 if (e)
4093 bb = e->src;
4094
4095 if (bb && bb->aux)
4096 {
4097 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4098
4099 /* If the very first block is the one with the fall-through exit
4100 edge, we have to split that block. */
4101 if (c == bb)
4102 {
4103 bb = split_block_after_labels (bb)->dest;
4104 bb->aux = c->aux;
4105 c->aux = bb;
4106 BB_FOOTER (bb) = BB_FOOTER (c);
4107 BB_FOOTER (c) = NULL;
4108 }
4109
4110 while (c->aux != bb)
4111 c = (basic_block) c->aux;
4112
4113 c->aux = bb->aux;
4114 while (c->aux)
4115 c = (basic_block) c->aux;
4116
4117 c->aux = bb;
4118 bb->aux = NULL;
4119 }
4120 }
4121
4122 /* In case there are more than one fallthru predecessors of exit, force that
4123 there is only one. */
4124
4125 static void
4126 force_one_exit_fallthru (void)
4127 {
4128 edge e, predecessor = NULL;
4129 bool more = false;
4130 edge_iterator ei;
4131 basic_block forwarder, bb;
4132
4133 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4134 if (e->flags & EDGE_FALLTHRU)
4135 {
4136 if (predecessor == NULL)
4137 predecessor = e;
4138 else
4139 {
4140 more = true;
4141 break;
4142 }
4143 }
4144
4145 if (!more)
4146 return;
4147
4148 /* Exit has several fallthru predecessors. Create a forwarder block for
4149 them. */
4150 forwarder = split_edge (predecessor);
4151 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4152 (e = ei_safe_edge (ei)); )
4153 {
4154 if (e->src == forwarder
4155 || !(e->flags & EDGE_FALLTHRU))
4156 ei_next (&ei);
4157 else
4158 redirect_edge_and_branch_force (e, forwarder);
4159 }
4160
4161 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4162 exit block. */
4163 FOR_EACH_BB_FN (bb, cfun)
4164 {
4165 if (bb->aux == NULL && bb != forwarder)
4166 {
4167 bb->aux = forwarder;
4168 break;
4169 }
4170 }
4171 }
4172 \f
4173 /* Return true in case it is possible to duplicate the basic block BB. */
4174
4175 static bool
4176 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4177 {
4178 /* Do not attempt to duplicate tablejumps, as we need to unshare
4179 the dispatch table. This is difficult to do, as the instructions
4180 computing jump destination may be hoisted outside the basic block. */
4181 if (tablejump_p (BB_END (bb), NULL, NULL))
4182 return false;
4183
4184 /* Do not duplicate blocks containing insns that can't be copied. */
4185 if (targetm.cannot_copy_insn_p)
4186 {
4187 rtx_insn *insn = BB_HEAD (bb);
4188 while (1)
4189 {
4190 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4191 return false;
4192 if (insn == BB_END (bb))
4193 break;
4194 insn = NEXT_INSN (insn);
4195 }
4196 }
4197
4198 return true;
4199 }
4200
4201 rtx_insn *
4202 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4203 {
4204 rtx_insn *insn, *next, *copy;
4205 rtx_note *last;
4206
4207 /* Avoid updating of boundaries of previous basic block. The
4208 note will get removed from insn stream in fixup. */
4209 last = emit_note (NOTE_INSN_DELETED);
4210
4211 /* Create copy at the end of INSN chain. The chain will
4212 be reordered later. */
4213 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4214 {
4215 switch (GET_CODE (insn))
4216 {
4217 case DEBUG_INSN:
4218 /* Don't duplicate label debug insns. */
4219 if (DEBUG_BIND_INSN_P (insn)
4220 && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4221 break;
4222 /* FALLTHRU */
4223 case INSN:
4224 case CALL_INSN:
4225 case JUMP_INSN:
4226 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4227 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4228 && ANY_RETURN_P (JUMP_LABEL (insn)))
4229 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4230 maybe_copy_prologue_epilogue_insn (insn, copy);
4231 break;
4232
4233 case JUMP_TABLE_DATA:
4234 /* Avoid copying of dispatch tables. We never duplicate
4235 tablejumps, so this can hit only in case the table got
4236 moved far from original jump.
4237 Avoid copying following barrier as well if any
4238 (and debug insns in between). */
4239 for (next = NEXT_INSN (insn);
4240 next != NEXT_INSN (to);
4241 next = NEXT_INSN (next))
4242 if (!DEBUG_INSN_P (next))
4243 break;
4244 if (next != NEXT_INSN (to) && BARRIER_P (next))
4245 insn = next;
4246 break;
4247
4248 case CODE_LABEL:
4249 break;
4250
4251 case BARRIER:
4252 emit_barrier ();
4253 break;
4254
4255 case NOTE:
4256 switch (NOTE_KIND (insn))
4257 {
4258 /* In case prologue is empty and function contain label
4259 in first BB, we may want to copy the block. */
4260 case NOTE_INSN_PROLOGUE_END:
4261
4262 case NOTE_INSN_DELETED:
4263 case NOTE_INSN_DELETED_LABEL:
4264 case NOTE_INSN_DELETED_DEBUG_LABEL:
4265 /* No problem to strip these. */
4266 case NOTE_INSN_FUNCTION_BEG:
4267 /* There is always just single entry to function. */
4268 case NOTE_INSN_BASIC_BLOCK:
4269 /* We should only switch text sections once. */
4270 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4271 break;
4272
4273 case NOTE_INSN_EPILOGUE_BEG:
4274 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4275 emit_note_copy (as_a <rtx_note *> (insn));
4276 break;
4277
4278 default:
4279 /* All other notes should have already been eliminated. */
4280 gcc_unreachable ();
4281 }
4282 break;
4283 default:
4284 gcc_unreachable ();
4285 }
4286 }
4287 insn = NEXT_INSN (last);
4288 delete_insn (last);
4289 return insn;
4290 }
4291
4292 /* Create a duplicate of the basic block BB. */
4293
4294 static basic_block
4295 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
4296 {
4297 rtx_insn *insn;
4298 basic_block new_bb;
4299
4300 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4301 new_bb = create_basic_block (insn,
4302 insn ? get_last_insn () : NULL,
4303 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4304
4305 BB_COPY_PARTITION (new_bb, bb);
4306 if (BB_HEADER (bb))
4307 {
4308 insn = BB_HEADER (bb);
4309 while (NEXT_INSN (insn))
4310 insn = NEXT_INSN (insn);
4311 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4312 if (insn)
4313 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4314 }
4315
4316 if (BB_FOOTER (bb))
4317 {
4318 insn = BB_FOOTER (bb);
4319 while (NEXT_INSN (insn))
4320 insn = NEXT_INSN (insn);
4321 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4322 if (insn)
4323 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4324 }
4325
4326 return new_bb;
4327 }
4328
4329 \f
4330 /* Main entry point to this module - initialize the datastructures for
4331 CFG layout changes. It keeps LOOPS up-to-date if not null.
4332
4333 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4334
4335 void
4336 cfg_layout_initialize (int flags)
4337 {
4338 rtx_insn_list *x;
4339 basic_block bb;
4340
4341 /* Once bb partitioning is complete, cfg layout mode should not be
4342 re-entered. Entering cfg layout mode may require fixups. As an
4343 example, if edge forwarding performed when optimizing the cfg
4344 layout required moving a block from the hot to the cold
4345 section. This would create an illegal partitioning unless some
4346 manual fixup was performed. */
4347 gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4348
4349 initialize_original_copy_tables ();
4350
4351 cfg_layout_rtl_register_cfg_hooks ();
4352
4353 record_effective_endpoints ();
4354
4355 /* Make sure that the targets of non local gotos are marked. */
4356 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4357 {
4358 bb = BLOCK_FOR_INSN (x->insn ());
4359 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4360 }
4361
4362 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4363 }
4364
4365 /* Splits superblocks. */
4366 void
4367 break_superblocks (void)
4368 {
4369 bool need = false;
4370 basic_block bb;
4371
4372 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4373 bitmap_clear (superblocks);
4374
4375 FOR_EACH_BB_FN (bb, cfun)
4376 if (bb->flags & BB_SUPERBLOCK)
4377 {
4378 bb->flags &= ~BB_SUPERBLOCK;
4379 bitmap_set_bit (superblocks, bb->index);
4380 need = true;
4381 }
4382
4383 if (need)
4384 {
4385 rebuild_jump_labels (get_insns ());
4386 find_many_sub_basic_blocks (superblocks);
4387 }
4388 }
4389
4390 /* Finalize the changes: reorder insn list according to the sequence specified
4391 by aux pointers, enter compensation code, rebuild scope forest. */
4392
4393 void
4394 cfg_layout_finalize (void)
4395 {
4396 free_dominance_info (CDI_DOMINATORS);
4397 force_one_exit_fallthru ();
4398 rtl_register_cfg_hooks ();
4399 if (reload_completed && !targetm.have_epilogue ())
4400 fixup_fallthru_exit_predecessor ();
4401 fixup_reorder_chain ();
4402
4403 rebuild_jump_labels (get_insns ());
4404 delete_dead_jumptables ();
4405
4406 if (flag_checking)
4407 verify_insn_chain ();
4408 checking_verify_flow_info ();
4409 }
4410
4411
4412 /* Same as split_block but update cfg_layout structures. */
4413
4414 static basic_block
4415 cfg_layout_split_block (basic_block bb, void *insnp)
4416 {
4417 rtx insn = (rtx) insnp;
4418 basic_block new_bb = rtl_split_block (bb, insn);
4419
4420 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4421 BB_FOOTER (bb) = NULL;
4422
4423 return new_bb;
4424 }
4425
4426 /* Redirect Edge to DEST. */
4427 static edge
4428 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4429 {
4430 basic_block src = e->src;
4431 edge ret;
4432
4433 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4434 return NULL;
4435
4436 if (e->dest == dest)
4437 return e;
4438
4439 if (e->flags & EDGE_CROSSING
4440 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4441 && simplejump_p (BB_END (src)))
4442 {
4443 if (dump_file)
4444 fprintf (dump_file,
4445 "Removing crossing jump while redirecting edge form %i to %i\n",
4446 e->src->index, dest->index);
4447 delete_insn (BB_END (src));
4448 remove_barriers_from_footer (src);
4449 e->flags |= EDGE_FALLTHRU;
4450 }
4451
4452 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4453 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4454 {
4455 df_set_bb_dirty (src);
4456 return ret;
4457 }
4458
4459 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4460 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4461 {
4462 if (dump_file)
4463 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4464 e->src->index, dest->index);
4465
4466 df_set_bb_dirty (e->src);
4467 redirect_edge_succ (e, dest);
4468 return e;
4469 }
4470
4471 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4472 in the case the basic block appears to be in sequence. Avoid this
4473 transformation. */
4474
4475 if (e->flags & EDGE_FALLTHRU)
4476 {
4477 /* Redirect any branch edges unified with the fallthru one. */
4478 if (JUMP_P (BB_END (src))
4479 && label_is_jump_target_p (BB_HEAD (e->dest),
4480 BB_END (src)))
4481 {
4482 edge redirected;
4483
4484 if (dump_file)
4485 fprintf (dump_file, "Fallthru edge unified with branch "
4486 "%i->%i redirected to %i\n",
4487 e->src->index, e->dest->index, dest->index);
4488 e->flags &= ~EDGE_FALLTHRU;
4489 redirected = redirect_branch_edge (e, dest);
4490 gcc_assert (redirected);
4491 redirected->flags |= EDGE_FALLTHRU;
4492 df_set_bb_dirty (redirected->src);
4493 return redirected;
4494 }
4495 /* In case we are redirecting fallthru edge to the branch edge
4496 of conditional jump, remove it. */
4497 if (EDGE_COUNT (src->succs) == 2)
4498 {
4499 /* Find the edge that is different from E. */
4500 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4501
4502 if (s->dest == dest
4503 && any_condjump_p (BB_END (src))
4504 && onlyjump_p (BB_END (src)))
4505 delete_insn (BB_END (src));
4506 }
4507 if (dump_file)
4508 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4509 e->src->index, e->dest->index, dest->index);
4510 ret = redirect_edge_succ_nodup (e, dest);
4511 }
4512 else
4513 ret = redirect_branch_edge (e, dest);
4514
4515 if (!ret)
4516 return NULL;
4517
4518 fixup_partition_crossing (ret);
4519 /* We don't want simplejumps in the insn stream during cfglayout. */
4520 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4521
4522 df_set_bb_dirty (src);
4523 return ret;
4524 }
4525
4526 /* Simple wrapper as we always can redirect fallthru edges. */
4527 static basic_block
4528 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4529 {
4530 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4531
4532 gcc_assert (redirected);
4533 return NULL;
4534 }
4535
4536 /* Same as delete_basic_block but update cfg_layout structures. */
4537
4538 static void
4539 cfg_layout_delete_block (basic_block bb)
4540 {
4541 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
4542 rtx_insn **to;
4543
4544 if (BB_HEADER (bb))
4545 {
4546 next = BB_HEAD (bb);
4547 if (prev)
4548 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4549 else
4550 set_first_insn (BB_HEADER (bb));
4551 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4552 insn = BB_HEADER (bb);
4553 while (NEXT_INSN (insn))
4554 insn = NEXT_INSN (insn);
4555 SET_NEXT_INSN (insn) = next;
4556 SET_PREV_INSN (next) = insn;
4557 }
4558 next = NEXT_INSN (BB_END (bb));
4559 if (BB_FOOTER (bb))
4560 {
4561 insn = BB_FOOTER (bb);
4562 while (insn)
4563 {
4564 if (BARRIER_P (insn))
4565 {
4566 if (PREV_INSN (insn))
4567 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4568 else
4569 BB_FOOTER (bb) = NEXT_INSN (insn);
4570 if (NEXT_INSN (insn))
4571 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4572 }
4573 if (LABEL_P (insn))
4574 break;
4575 insn = NEXT_INSN (insn);
4576 }
4577 if (BB_FOOTER (bb))
4578 {
4579 insn = BB_END (bb);
4580 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4581 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4582 while (NEXT_INSN (insn))
4583 insn = NEXT_INSN (insn);
4584 SET_NEXT_INSN (insn) = next;
4585 if (next)
4586 SET_PREV_INSN (next) = insn;
4587 else
4588 set_last_insn (insn);
4589 }
4590 }
4591 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4592 to = &BB_HEADER (bb->next_bb);
4593 else
4594 to = &cfg_layout_function_footer;
4595
4596 rtl_delete_block (bb);
4597
4598 if (prev)
4599 prev = NEXT_INSN (prev);
4600 else
4601 prev = get_insns ();
4602 if (next)
4603 next = PREV_INSN (next);
4604 else
4605 next = get_last_insn ();
4606
4607 if (next && NEXT_INSN (next) != prev)
4608 {
4609 remaints = unlink_insn_chain (prev, next);
4610 insn = remaints;
4611 while (NEXT_INSN (insn))
4612 insn = NEXT_INSN (insn);
4613 SET_NEXT_INSN (insn) = *to;
4614 if (*to)
4615 SET_PREV_INSN (*to) = insn;
4616 *to = remaints;
4617 }
4618 }
4619
4620 /* Return true when blocks A and B can be safely merged. */
4621
4622 static bool
4623 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4624 {
4625 /* If we are partitioning hot/cold basic blocks, we don't want to
4626 mess up unconditional or indirect jumps that cross between hot
4627 and cold sections.
4628
4629 Basic block partitioning may result in some jumps that appear to
4630 be optimizable (or blocks that appear to be mergeable), but which really
4631 must be left untouched (they are required to make it safely across
4632 partition boundaries). See the comments at the top of
4633 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4634
4635 if (BB_PARTITION (a) != BB_PARTITION (b))
4636 return false;
4637
4638 /* Protect the loop latches. */
4639 if (current_loops && b->loop_father->latch == b)
4640 return false;
4641
4642 /* If we would end up moving B's instructions, make sure it doesn't fall
4643 through into the exit block, since we cannot recover from a fallthrough
4644 edge into the exit block occurring in the middle of a function. */
4645 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4646 {
4647 edge e = find_fallthru_edge (b->succs);
4648 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4649 return false;
4650 }
4651
4652 /* There must be exactly one edge in between the blocks. */
4653 return (single_succ_p (a)
4654 && single_succ (a) == b
4655 && single_pred_p (b) == 1
4656 && a != b
4657 /* Must be simple edge. */
4658 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4659 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4660 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4661 /* If the jump insn has side effects, we can't kill the edge.
4662 When not optimizing, try_redirect_by_replacing_jump will
4663 not allow us to redirect an edge by replacing a table jump. */
4664 && (!JUMP_P (BB_END (a))
4665 || ((!optimize || reload_completed)
4666 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4667 }
4668
4669 /* Merge block A and B. The blocks must be mergeable. */
4670
4671 static void
4672 cfg_layout_merge_blocks (basic_block a, basic_block b)
4673 {
4674 /* If B is a forwarder block whose outgoing edge has no location, we'll
4675 propagate the locus of the edge between A and B onto it. */
4676 const bool forward_edge_locus
4677 = (b->flags & BB_FORWARDER_BLOCK) != 0
4678 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4679 rtx_insn *insn;
4680
4681 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4682
4683 if (dump_file)
4684 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4685 a->index);
4686
4687 /* If there was a CODE_LABEL beginning B, delete it. */
4688 if (LABEL_P (BB_HEAD (b)))
4689 {
4690 delete_insn (BB_HEAD (b));
4691 }
4692
4693 /* We should have fallthru edge in a, or we can do dummy redirection to get
4694 it cleaned up. */
4695 if (JUMP_P (BB_END (a)))
4696 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4697 gcc_assert (!JUMP_P (BB_END (a)));
4698
4699 /* If not optimizing, preserve the locus of the single edge between
4700 blocks A and B if necessary by emitting a nop. */
4701 if (!optimize
4702 && !forward_edge_locus
4703 && !DECL_IGNORED_P (current_function_decl))
4704 emit_nop_for_unique_locus_between (a, b);
4705
4706 /* Move things from b->footer after a->footer. */
4707 if (BB_FOOTER (b))
4708 {
4709 if (!BB_FOOTER (a))
4710 BB_FOOTER (a) = BB_FOOTER (b);
4711 else
4712 {
4713 rtx_insn *last = BB_FOOTER (a);
4714
4715 while (NEXT_INSN (last))
4716 last = NEXT_INSN (last);
4717 SET_NEXT_INSN (last) = BB_FOOTER (b);
4718 SET_PREV_INSN (BB_FOOTER (b)) = last;
4719 }
4720 BB_FOOTER (b) = NULL;
4721 }
4722
4723 /* Move things from b->header before a->footer.
4724 Note that this may include dead tablejump data, but we don't clean
4725 those up until we go out of cfglayout mode. */
4726 if (BB_HEADER (b))
4727 {
4728 if (! BB_FOOTER (a))
4729 BB_FOOTER (a) = BB_HEADER (b);
4730 else
4731 {
4732 rtx_insn *last = BB_HEADER (b);
4733
4734 while (NEXT_INSN (last))
4735 last = NEXT_INSN (last);
4736 SET_NEXT_INSN (last) = BB_FOOTER (a);
4737 SET_PREV_INSN (BB_FOOTER (a)) = last;
4738 BB_FOOTER (a) = BB_HEADER (b);
4739 }
4740 BB_HEADER (b) = NULL;
4741 }
4742
4743 /* In the case basic blocks are not adjacent, move them around. */
4744 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4745 {
4746 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4747
4748 emit_insn_after_noloc (insn, BB_END (a), a);
4749 }
4750 /* Otherwise just re-associate the instructions. */
4751 else
4752 {
4753 insn = BB_HEAD (b);
4754 BB_END (a) = BB_END (b);
4755 }
4756
4757 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4758 We need to explicitly call. */
4759 update_bb_for_insn_chain (insn, BB_END (b), a);
4760
4761 /* Skip possible DELETED_LABEL insn. */
4762 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4763 insn = NEXT_INSN (insn);
4764 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4765 BB_HEAD (b) = BB_END (b) = NULL;
4766 delete_insn (insn);
4767
4768 df_bb_delete (b->index);
4769
4770 if (forward_edge_locus)
4771 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4772
4773 if (dump_file)
4774 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4775 }
4776
4777 /* Split edge E. */
4778
4779 static basic_block
4780 cfg_layout_split_edge (edge e)
4781 {
4782 basic_block new_bb =
4783 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4784 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4785 NULL_RTX, e->src);
4786
4787 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4788 BB_COPY_PARTITION (new_bb, e->src);
4789 else
4790 BB_COPY_PARTITION (new_bb, e->dest);
4791 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4792 redirect_edge_and_branch_force (e, new_bb);
4793
4794 return new_bb;
4795 }
4796
4797 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4798
4799 static void
4800 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4801 {
4802 }
4803
4804 /* Return true if BB contains only labels or non-executable
4805 instructions. */
4806
4807 static bool
4808 rtl_block_empty_p (basic_block bb)
4809 {
4810 rtx_insn *insn;
4811
4812 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4813 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4814 return true;
4815
4816 FOR_BB_INSNS (bb, insn)
4817 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4818 return false;
4819
4820 return true;
4821 }
4822
4823 /* Split a basic block if it ends with a conditional branch and if
4824 the other part of the block is not empty. */
4825
4826 static basic_block
4827 rtl_split_block_before_cond_jump (basic_block bb)
4828 {
4829 rtx_insn *insn;
4830 rtx_insn *split_point = NULL;
4831 rtx_insn *last = NULL;
4832 bool found_code = false;
4833
4834 FOR_BB_INSNS (bb, insn)
4835 {
4836 if (any_condjump_p (insn))
4837 split_point = last;
4838 else if (NONDEBUG_INSN_P (insn))
4839 found_code = true;
4840 last = insn;
4841 }
4842
4843 /* Did not find everything. */
4844 if (found_code && split_point)
4845 return split_block (bb, split_point)->dest;
4846 else
4847 return NULL;
4848 }
4849
4850 /* Return 1 if BB ends with a call, possibly followed by some
4851 instructions that must stay with the call, 0 otherwise. */
4852
4853 static bool
4854 rtl_block_ends_with_call_p (basic_block bb)
4855 {
4856 rtx_insn *insn = BB_END (bb);
4857
4858 while (!CALL_P (insn)
4859 && insn != BB_HEAD (bb)
4860 && (keep_with_call_p (insn)
4861 || NOTE_P (insn)
4862 || DEBUG_INSN_P (insn)))
4863 insn = PREV_INSN (insn);
4864 return (CALL_P (insn));
4865 }
4866
4867 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4868
4869 static bool
4870 rtl_block_ends_with_condjump_p (const_basic_block bb)
4871 {
4872 return any_condjump_p (BB_END (bb));
4873 }
4874
4875 /* Return true if we need to add fake edge to exit.
4876 Helper function for rtl_flow_call_edges_add. */
4877
4878 static bool
4879 need_fake_edge_p (const rtx_insn *insn)
4880 {
4881 if (!INSN_P (insn))
4882 return false;
4883
4884 if ((CALL_P (insn)
4885 && !SIBLING_CALL_P (insn)
4886 && !find_reg_note (insn, REG_NORETURN, NULL)
4887 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4888 return true;
4889
4890 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4891 && MEM_VOLATILE_P (PATTERN (insn)))
4892 || (GET_CODE (PATTERN (insn)) == PARALLEL
4893 && asm_noperands (insn) != -1
4894 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4895 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4896 }
4897
4898 /* Add fake edges to the function exit for any non constant and non noreturn
4899 calls, volatile inline assembly in the bitmap of blocks specified by
4900 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4901 that were split.
4902
4903 The goal is to expose cases in which entering a basic block does not imply
4904 that all subsequent instructions must be executed. */
4905
4906 static int
4907 rtl_flow_call_edges_add (sbitmap blocks)
4908 {
4909 int i;
4910 int blocks_split = 0;
4911 int last_bb = last_basic_block_for_fn (cfun);
4912 bool check_last_block = false;
4913
4914 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
4915 return 0;
4916
4917 if (! blocks)
4918 check_last_block = true;
4919 else
4920 check_last_block = bitmap_bit_p (blocks,
4921 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
4922
4923 /* In the last basic block, before epilogue generation, there will be
4924 a fallthru edge to EXIT. Special care is required if the last insn
4925 of the last basic block is a call because make_edge folds duplicate
4926 edges, which would result in the fallthru edge also being marked
4927 fake, which would result in the fallthru edge being removed by
4928 remove_fake_edges, which would result in an invalid CFG.
4929
4930 Moreover, we can't elide the outgoing fake edge, since the block
4931 profiler needs to take this into account in order to solve the minimal
4932 spanning tree in the case that the call doesn't return.
4933
4934 Handle this by adding a dummy instruction in a new last basic block. */
4935 if (check_last_block)
4936 {
4937 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
4938 rtx_insn *insn = BB_END (bb);
4939
4940 /* Back up past insns that must be kept in the same block as a call. */
4941 while (insn != BB_HEAD (bb)
4942 && keep_with_call_p (insn))
4943 insn = PREV_INSN (insn);
4944
4945 if (need_fake_edge_p (insn))
4946 {
4947 edge e;
4948
4949 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4950 if (e)
4951 {
4952 insert_insn_on_edge (gen_use (const0_rtx), e);
4953 commit_edge_insertions ();
4954 }
4955 }
4956 }
4957
4958 /* Now add fake edges to the function exit for any non constant
4959 calls since there is no way that we can determine if they will
4960 return or not... */
4961
4962 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4963 {
4964 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
4965 rtx_insn *insn;
4966 rtx_insn *prev_insn;
4967
4968 if (!bb)
4969 continue;
4970
4971 if (blocks && !bitmap_bit_p (blocks, i))
4972 continue;
4973
4974 for (insn = BB_END (bb); ; insn = prev_insn)
4975 {
4976 prev_insn = PREV_INSN (insn);
4977 if (need_fake_edge_p (insn))
4978 {
4979 edge e;
4980 rtx_insn *split_at_insn = insn;
4981
4982 /* Don't split the block between a call and an insn that should
4983 remain in the same block as the call. */
4984 if (CALL_P (insn))
4985 while (split_at_insn != BB_END (bb)
4986 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4987 split_at_insn = NEXT_INSN (split_at_insn);
4988
4989 /* The handling above of the final block before the epilogue
4990 should be enough to verify that there is no edge to the exit
4991 block in CFG already. Calling make_edge in such case would
4992 cause us to mark that edge as fake and remove it later. */
4993
4994 if (flag_checking && split_at_insn == BB_END (bb))
4995 {
4996 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4997 gcc_assert (e == NULL);
4998 }
4999
5000 /* Note that the following may create a new basic block
5001 and renumber the existing basic blocks. */
5002 if (split_at_insn != BB_END (bb))
5003 {
5004 e = split_block (bb, split_at_insn);
5005 if (e)
5006 blocks_split++;
5007 }
5008
5009 edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
5010 ne->probability = profile_probability::guessed_never ();
5011 }
5012
5013 if (insn == BB_HEAD (bb))
5014 break;
5015 }
5016 }
5017
5018 if (blocks_split)
5019 verify_flow_info ();
5020
5021 return blocks_split;
5022 }
5023
5024 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
5025 the conditional branch target, SECOND_HEAD should be the fall-thru
5026 there is no need to handle this here the loop versioning code handles
5027 this. the reason for SECON_HEAD is that it is needed for condition
5028 in trees, and this should be of the same type since it is a hook. */
5029 static void
5030 rtl_lv_add_condition_to_bb (basic_block first_head ,
5031 basic_block second_head ATTRIBUTE_UNUSED,
5032 basic_block cond_bb, void *comp_rtx)
5033 {
5034 rtx_code_label *label;
5035 rtx_insn *seq, *jump;
5036 rtx op0 = XEXP ((rtx)comp_rtx, 0);
5037 rtx op1 = XEXP ((rtx)comp_rtx, 1);
5038 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
5039 machine_mode mode;
5040
5041
5042 label = block_label (first_head);
5043 mode = GET_MODE (op0);
5044 if (mode == VOIDmode)
5045 mode = GET_MODE (op1);
5046
5047 start_sequence ();
5048 op0 = force_operand (op0, NULL_RTX);
5049 op1 = force_operand (op1, NULL_RTX);
5050 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5051 profile_probability::uninitialized ());
5052 jump = get_last_insn ();
5053 JUMP_LABEL (jump) = label;
5054 LABEL_NUSES (label)++;
5055 seq = get_insns ();
5056 end_sequence ();
5057
5058 /* Add the new cond, in the new head. */
5059 emit_insn_after (seq, BB_END (cond_bb));
5060 }
5061
5062
5063 /* Given a block B with unconditional branch at its end, get the
5064 store the return the branch edge and the fall-thru edge in
5065 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
5066 static void
5067 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5068 edge *fallthru_edge)
5069 {
5070 edge e = EDGE_SUCC (b, 0);
5071
5072 if (e->flags & EDGE_FALLTHRU)
5073 {
5074 *fallthru_edge = e;
5075 *branch_edge = EDGE_SUCC (b, 1);
5076 }
5077 else
5078 {
5079 *branch_edge = e;
5080 *fallthru_edge = EDGE_SUCC (b, 1);
5081 }
5082 }
5083
5084 void
5085 init_rtl_bb_info (basic_block bb)
5086 {
5087 gcc_assert (!bb->il.x.rtl);
5088 bb->il.x.head_ = NULL;
5089 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5090 }
5091
5092 /* Returns true if it is possible to remove edge E by redirecting
5093 it to the destination of the other edge from E->src. */
5094
5095 static bool
5096 rtl_can_remove_branch_p (const_edge e)
5097 {
5098 const_basic_block src = e->src;
5099 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5100 const rtx_insn *insn = BB_END (src);
5101 rtx set;
5102
5103 /* The conditions are taken from try_redirect_by_replacing_jump. */
5104 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5105 return false;
5106
5107 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5108 return false;
5109
5110 if (BB_PARTITION (src) != BB_PARTITION (target))
5111 return false;
5112
5113 if (!onlyjump_p (insn)
5114 || tablejump_p (insn, NULL, NULL))
5115 return false;
5116
5117 set = single_set (insn);
5118 if (!set || side_effects_p (set))
5119 return false;
5120
5121 return true;
5122 }
5123
5124 static basic_block
5125 rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
5126 {
5127 bb = cfg_layout_duplicate_bb (bb, id);
5128 bb->aux = NULL;
5129 return bb;
5130 }
5131
5132 /* Do book-keeping of basic block BB for the profile consistency checker.
5133 Store the counting in RECORD. */
5134 static void
5135 rtl_account_profile_record (basic_block bb, struct profile_record *record)
5136 {
5137 rtx_insn *insn;
5138 FOR_BB_INSNS (bb, insn)
5139 if (INSN_P (insn))
5140 {
5141 record->size += insn_cost (insn, false);
5142 if (bb->count.initialized_p ())
5143 record->time
5144 += insn_cost (insn, true) * bb->count.to_gcov_type ();
5145 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5146 record->time
5147 += insn_cost (insn, true) * bb->count.to_frequency (cfun);
5148 }
5149 }
5150
5151 /* Implementation of CFG manipulation for linearized RTL. */
5152 struct cfg_hooks rtl_cfg_hooks = {
5153 "rtl",
5154 rtl_verify_flow_info,
5155 rtl_dump_bb,
5156 rtl_dump_bb_for_graph,
5157 rtl_create_basic_block,
5158 rtl_redirect_edge_and_branch,
5159 rtl_redirect_edge_and_branch_force,
5160 rtl_can_remove_branch_p,
5161 rtl_delete_block,
5162 rtl_split_block,
5163 rtl_move_block_after,
5164 rtl_can_merge_blocks, /* can_merge_blocks_p */
5165 rtl_merge_blocks,
5166 rtl_predict_edge,
5167 rtl_predicted_by_p,
5168 cfg_layout_can_duplicate_bb_p,
5169 rtl_duplicate_bb,
5170 rtl_split_edge,
5171 rtl_make_forwarder_block,
5172 rtl_tidy_fallthru_edge,
5173 rtl_force_nonfallthru,
5174 rtl_block_ends_with_call_p,
5175 rtl_block_ends_with_condjump_p,
5176 rtl_flow_call_edges_add,
5177 NULL, /* execute_on_growing_pred */
5178 NULL, /* execute_on_shrinking_pred */
5179 NULL, /* duplicate loop for trees */
5180 NULL, /* lv_add_condition_to_bb */
5181 NULL, /* lv_adjust_loop_header_phi*/
5182 NULL, /* extract_cond_bb_edges */
5183 NULL, /* flush_pending_stmts */
5184 rtl_block_empty_p, /* block_empty_p */
5185 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5186 rtl_account_profile_record,
5187 };
5188
5189 /* Implementation of CFG manipulation for cfg layout RTL, where
5190 basic block connected via fallthru edges does not have to be adjacent.
5191 This representation will hopefully become the default one in future
5192 version of the compiler. */
5193
5194 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5195 "cfglayout mode",
5196 rtl_verify_flow_info_1,
5197 rtl_dump_bb,
5198 rtl_dump_bb_for_graph,
5199 cfg_layout_create_basic_block,
5200 cfg_layout_redirect_edge_and_branch,
5201 cfg_layout_redirect_edge_and_branch_force,
5202 rtl_can_remove_branch_p,
5203 cfg_layout_delete_block,
5204 cfg_layout_split_block,
5205 rtl_move_block_after,
5206 cfg_layout_can_merge_blocks_p,
5207 cfg_layout_merge_blocks,
5208 rtl_predict_edge,
5209 rtl_predicted_by_p,
5210 cfg_layout_can_duplicate_bb_p,
5211 cfg_layout_duplicate_bb,
5212 cfg_layout_split_edge,
5213 rtl_make_forwarder_block,
5214 NULL, /* tidy_fallthru_edge */
5215 rtl_force_nonfallthru,
5216 rtl_block_ends_with_call_p,
5217 rtl_block_ends_with_condjump_p,
5218 rtl_flow_call_edges_add,
5219 NULL, /* execute_on_growing_pred */
5220 NULL, /* execute_on_shrinking_pred */
5221 duplicate_loop_to_header_edge, /* duplicate loop for trees */
5222 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5223 NULL, /* lv_adjust_loop_header_phi*/
5224 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5225 NULL, /* flush_pending_stmts */
5226 rtl_block_empty_p, /* block_empty_p */
5227 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5228 rtl_account_profile_record,
5229 };
5230
5231 #include "gt-cfgrtl.h"
5232
5233 #if __GNUC__ >= 10
5234 # pragma GCC diagnostic pop
5235 #endif