/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low-level functions that manipulate and analyze the
   CFG and that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
         fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to determine quickly the basic block for an insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of the insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */
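
/* Note that many routines below come in two flavors: an rtl_* version,
   used when the insn chain itself defines block order (IR_RTL_CFGRTL),
   and a cfg_layout_* version, used in cfglayout mode, where block order
   is tracked separately and barriers live in the per-block footer.  */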
\f
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"

/* Disable warnings about missing quoting in GCC diagnostics.  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
\f
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !vec_safe_contains<rtx_insn *> (forced_labels,
                                             const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
        {
          const char *name = LABEL_NAME (insn);
          basic_block bb = BLOCK_FOR_INSN (insn);
          rtx_insn *bb_note = NEXT_INSN (insn);

          really_delete = false;
          PUT_CODE (insn, NOTE);
          NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (insn) = name;

          /* If the note following the label starts a basic block, and the
             label is a member of the same basic block, interchange the two.  */
          if (bb_note != NULL_RTX
              && NOTE_INSN_BASIC_BLOCK_P (bb_note)
              && bb != NULL
              && bb == BLOCK_FOR_INSN (bb_note))
            {
              reorder_insns_nobb (insn, insn, bb_note);
              BB_HEAD (bb) = bb_note;
              if (BB_END (bb) == bb_note)
                BB_END (bb) = insn;
            }
        }

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
        df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
          && LABEL_P (JUMP_LABEL (insn)))
        LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
              = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
             && LABEL_P (XEXP (note, 0)))
        {
          LABEL_NUSES (XEXP (note, 0))--;
          remove_note (insn, note);
        }
    }

  /* Likewise if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
         && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
        {
          rtx label = XEXP (RTVEC_ELT (vec, i), 0);

          /* When deleting code in bulk (e.g. removing many unreachable
             blocks) we can delete a label that's a target of the vector
             before deleting the vector itself.  */
          if (!NOTE_P (label))
            LABEL_NUSES (label)--;
        }
    }
}

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

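  /* Only the last insn of a basic block determines its outgoing edges:
     deleting it (e.g. a jump) may leave edges without a matching insn,
     so remember to purge them afterwards.  Note that delete_insn leaves
     BLOCK_FOR_INSN intact, which the purge below relies on.  */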
  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set the bb field to
   NULL for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
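  /* Walk backward from FINISH, capturing PREV_INSN before each deletion,
     so that removing CURRENT from the chain never invalidates the link
     we still need to continue the traversal.  */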
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
        ;
      else
        delete_insn (current);

      if (clear_bb && !current->deleted ())
        set_block_for_insn (current, NULL);

      if (current == start)
        break;
      current = prev;
    }
}
\f
/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction -
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block the new block
   should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
                              basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
        after = head;
      else
        {
          after = PREV_INSN (head);
          head = bb_note;
        }

      if (after != bb_note && NEXT_INSN (after) != bb_note)
        reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
        head = end = bb_note
          = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
        {
          bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
          if (head == end)
            end = bb_note;
        }
      else
        {
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
          head = bb_note;
          if (!end)
            end = head;
        }

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD
   and END and place it in the BB chain after block AFTER.  END can be
   NULL to create a new empty basic block before HEAD.  Both END and HEAD
   can be NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
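  /* The new size is last_basic_block plus (last_basic_block + 3) / 4,
     i.e. roughly 25% headroom, so repeated block creation grows the
     array geometrically rather than one slot at a time.  */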
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}
\f
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
\f
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
        {
          BLOCK_FOR_INSN (insn) = bb;
          if (insn == end)
            break;
        }
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the insn after which we should emit when we want to emit code
   at the entry of the function.  */

rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS
          ? BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */

void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

\f
/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread a jump if the USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (JUMP_P (insn) && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
        return false;
    }

  return true;
}

/* Return true if we can reach TARGET from SRC by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

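  /* SRC falls through to TARGET only if no active insn lies between the
     end of SRC and the first active insn of TARGET.  */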
  return next_active_insn (insn) == insn2;
}

/* Return true if we could reach TARGET from SRC by falling through,
   if the target were made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */

static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}
\f
/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */

rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Creates a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
        {
          rtx_insn *next = insn;

          insn = PREV_INSN (insn);

          /* If the block contains only debug insns, insn would have
             been NULL in a non-debug compilation, and then we'd end
             up emitting a DELETED note.  For -fcompare-debug
             stability, emit the note too.  */
          if (insn != BB_END (bb)
              && DEBUG_INSN_P (next)
              && DEBUG_INSN_P (BB_END (bb)))
            {
              while (next != BB_END (bb) && DEBUG_INSN_P (next))
                next = NEXT_INSN (next);

              if (next == BB_END (bb))
                emit_note_after (NOTE_INSN_DELETED, next);
            }
        }
      else
        insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

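  /* The locus is unique to the edge if neither the tail of A nor the
     head of B already carries the same location on a real insn.  */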
  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
        insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
          && INSN_LOCATION (insn) == goto_locus)
        return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
             a->index);

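  /* Trailing debug insns of B are set aside: B_END is moved back past
     them, while [B_DEBUG_START, B_DEBUG_END] remembers the debug range
     so it can be reattached to A afterwards.  */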
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
         in particular at the end of a function.  */
      if (b_head == b_end)
        b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
        b_empty = 1;
      if (! del_last)
        del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
        if (!NOTE_P (prev)
            || NOTE_INSN_BASIC_BLOCK_P (prev)
            || prev == BB_HEAD (a))
          break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
        {
          rtx_insn *tmp = prev;

          prev = prev_nonnote_insn (prev);
          if (!prev)
            prev = BB_HEAD (a);
          del_first = tmp;
        }

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as anything that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
         and the debug insns that make up B after the debug insns,
         bringing the debug insns into A while keeping the notes after
         the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
        reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
                            b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}


/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
          && single_succ (a) == b
          && single_pred_p (b)
          && a != b
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
          && a->next_bb == b
          && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (!JUMP_P (BB_END (a))
              || (reload_completed
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
\f
/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));

  return as_a <rtx_code_label *> (BB_HEAD (block));
}

/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
        {
          if (PREV_INSN (insn))
            SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
          else
            BB_FOOTER (bb) = NEXT_INSN (insn);
          if (NEXT_INSN (insn))
            SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
        }
      if (LABEL_P (insn))
        return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
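  /* Note that EDGE_SUCC (src, EDGE_SUCC (src, 0) == e) picks the successor
     other than E: the boolean indexes edge 1 when E is edge 0, and edge 0
     otherwise.  */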
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
         edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
          && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
        fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
        {
          delete_insn_chain (kill_from, BB_END (src), false);
          remove_barriers_from_footer (src);
        }
      else
        delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
                           false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
        return NULL;
      if (dump_file)
        fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
                 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
                          block_label (target), 0))
        {
          gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
          return NULL;
        }
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
        fprintf (dump_file, "Replacing insn %i by jump %i\n",
                 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
         simple jump and remove its associated CODE_LABEL
         and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
        delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
        emit_barrier_after (BB_END (src));
      else
        {
          if (barrier != NEXT_INSN (BB_END (src)))
            {
              /* Move the jump before the barrier so that the notes
                 which originally were or were created before the jump
                 table are inside the basic block.  */
              rtx_insn *new_insn = BB_END (src);

              update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
                                        PREV_INSN (barrier), src);

              SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
              SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

              SET_NEXT_INSN (new_insn) = barrier;
              SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

              SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
              SET_PREV_INSN (barrier) = new_insn;
            }
        }
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached OLD_LABEL.  Return true if this
   worked or if the original target wasn't OLD_LABEL; return false if
   redirection doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
        if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
          {
            RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
            --LABEL_NUSES (old_label);
            ++LABEL_NUSES (new_label);
          }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
          && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
        {
          XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
                                                       new_label);
          --LABEL_NUSES (old_label);
          ++LABEL_NUSES (new_label);
        }
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
        {
          rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
          gcc_assert (GET_CODE (old_ref) == LABEL_REF);
          if (XEXP (old_ref, 0) == old_label)
            {
              ASM_OPERANDS_LABEL (tmp, i)
                = gen_rtx_LABEL_REF (Pmode, new_label);
              --LABEL_NUSES (old_label);
              ++LABEL_NUSES (new_label);
            }
        }

      if (JUMP_LABEL (insn) == old_label)
        {
          JUMP_LABEL (insn) = new_label;
          note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
          if (note)
            remove_note (insn, note);
        }
      else
        {
          note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
          if (note)
            remove_note (insn, note);
          if (JUMP_LABEL (insn) != new_label
              && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
            add_reg_note (insn, REG_LABEL_TARGET, new_label);
        }
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
             != NULL_RTX)
        XEXP (note, 0) = new_label;
    }
  else
    {
      /* ??? We may play games with moving the named labels from
         one basic block to the other in case only one computed_jump is
         available.  */
      if (computed_jump_p (insn)
          /* A return instruction can't be redirected.  */
          || returnjump_p (insn))
        return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
        {
          /* If the insn doesn't go where we think, we're confused.  */
          gcc_assert (JUMP_LABEL (insn) == old_label);

          /* If the substitution doesn't succeed, die.  This can happen
             if the back end emitted unrecognizable instructions or if
             the target is the exit block on some arches.  Or for crossing
             jumps.  */
          if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
                              block_label (new_bb), 0))
            {
              gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                          || CROSSING_JUMP_P (insn));
              return false;
            }
        }
    }
  return true;
}


/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump.  Return NULL on failure.  */

static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
        return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
                                             old_label, target))
        return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
             e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
        CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
         src if it exists, and if no other successors are
         still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
            CROSSING_JUMP_P (BB_END (e->src)) = 0;
        }
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    fixup_partition_crossing (e);

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
          && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equal to
   TARGET.  Then it should try the simplifications and do nothing if
   none is possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL if E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}

/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
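      /* In cfglayout mode the main insn chain carries no barriers; pull
         the freshly emitted barrier back out of the chain and append it
         to BB's footer instead, unless the footer already ends in one.  */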
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
        {
          rtx_insn *footer_tail = BB_FOOTER (bb);

          while (NEXT_INSN (footer_tail))
            footer_tail = NEXT_INSN (footer_tail);
          if (!BARRIER_P (footer_tail))
            {
              SET_NEXT_INSN (footer_tail) = insn;
              SET_PREV_INSN (insn) = footer_tail;
            }
        }
      else
        BB_FOOTER (bb) = insn;
    }
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
                                  block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
        {
          int prob = XINT (note, 0);

          b->probability = profile_probability::from_reg_br_prob_note (prob);
          e->probability -= e->probability;
        }
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as the
         abnormal edge.
         We can't redirect the abnormal edge, but we still can split the
         fallthru one and create a separate abnormal edge to the original
         destination.  This allows bb-reorder to make such an edge
         non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          /* We can't redirect the entry block.  Create an empty block
             at the start of the function which we use to add the new
             jump.  */
          edge tmp;
          edge_iterator ei;
          bool found = false;

          basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
                                               ENTRY_BLOCK_PTR_FOR_FN (cfun));
          bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

          /* Make sure new block ends up in correct hot/cold section.  */
          BB_COPY_PARTITION (bb, e->dest);

          /* Change the existing edge's source to be the new block, and add
             a new edge from the entry block to the new block.  */
          e->src = bb;
          for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
               (tmp = ei_safe_edge (ei)); )
            {
              if (tmp == e)
                {
                  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
                  found = true;
                  break;
                }
              else
                ei_next (&ei);
            }

          gcc_assert (found);

          vec_safe_push (bb->succs, e);
          make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
                                 EDGE_FALLTHRU);
        }
    }

  /* If e->src ends with an asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
        {
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
            {
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
              XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
              adjust_jump_target = true;
            }
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
            asm_goto_edge = true;
        }
      if (adjust_jump_target)
        {
          rtx_insn *insn = BB_END (e->src);
          rtx note;
          rtx_insn *old_label = BB_HEAD (e->dest);
          rtx_insn *new_label = BB_HEAD (target);

          if (JUMP_LABEL (insn) == old_label)
            {
              JUMP_LABEL (insn) = new_label;
              note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
              if (note)
                remove_note (insn, note);
            }
          else
            {
              note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
              if (note)
                remove_note (insn, note);
              if (JUMP_LABEL (insn) != new_label
                  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
                add_reg_note (insn, REG_LABEL_TARGET, new_label);
            }
          while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
                 != NULL_RTX)
            XEXP (note, 0) = new_label;
        }
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
         by searching forward from there.  Otherwise start searching
         forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
        new_head = table;
      else
        new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */
      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If the asm goto has any label refs to target's label,
         also add an edge from the asm goto bb to target.  */
      if (asm_goto_edge)
        {
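          /* Split the count and probability evenly between the existing
             fallthru edge and the new edge to TARGET; without better
             information this is the neutral assumption.  */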
          new_edge->probability = new_edge->probability.apply_scale (1, 2);
          jump_block->count = jump_block->count.apply_scale (1, 2);
          edge new_edge2 = make_edge (new_edge->src, target,
                                      e->flags & ~EDGE_FALLTHRU);
          new_edge2->probability = probability - new_edge->probability;
        }

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
        emit_jump_insn_after_setloc (targetm.gen_return (),
                                     BB_END (jump_block), loc);
      else
        {
          gcc_assert (jump_label == simple_return_rtx);
          emit_jump_insn_after_setloc (targetm.gen_simple_return (),
                                       BB_END (jump_block), loc);
        }
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
                                   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the needed jump
   instruction (and possibly create a new basic block) to make the edge
   non-fallthru.  Return the newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect edge E even at the expense of creating a new jump insn or
   basic block.  Return the new basic block if created, NULL otherwise.
   The conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect the newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
        {
          /* The label is likely mentioned in some instruction before
             the tablejump and might not be DCEd, so turn it into
             a note instead and move before the tablejump that is going to
             be deleted.  */
          const char *name = LABEL_NAME (label);
          PUT_CODE (label, NOTE);
          NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (label) = name;
          reorder_insns (label, label, PREV_INSN (q));
          delete_insn (table);
        }

      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
        q = PREV_INSN (q);

      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
\f
/* Should move basic block BB after basic block AFTER.  Not implemented
   yet.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
                      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
        return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}
1842
1843 /* Split a (typically critical) edge. Return the new block.
1844 The edge must not be abnormal.
1845
1846 ??? The code generally expects to be called on critical edges.
1847 The case of a block ending in an unconditional jump to a
1848 block with multiple predecessors is not handled optimally. */
1849
1850 static basic_block
1851 rtl_split_edge (edge edge_in)
1852 {
1853 basic_block bb, new_bb;
1854 rtx_insn *before;
1855
1856 /* Abnormal edges cannot be split. */
1857 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1858
1859 /* We are going to place the new block in front of edge destination.
1860 Avoid existence of fallthru predecessors. */
1861 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1862 {
1863 edge e = find_fallthru_edge (edge_in->dest->preds);
1864
1865 if (e)
1866 force_nonfallthru (e);
1867 }
1868
1869 /* Create the basic block note. */
1870 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1871 before = BB_HEAD (edge_in->dest);
1872 else
1873 before = NULL;
1874
1875 /* If this is a fall through edge to the exit block, the blocks might
1876 not be adjacent, and the right place is after the source. */
1877 if ((edge_in->flags & EDGE_FALLTHRU)
1878 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1879 {
1880 before = NEXT_INSN (BB_END (edge_in->src));
1881 bb = create_basic_block (before, NULL, edge_in->src);
1882 BB_COPY_PARTITION (bb, edge_in->src);
1883 }
1884 else
1885 {
1886 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1887 {
1888 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1889 BB_COPY_PARTITION (bb, edge_in->dest);
1890 }
1891 else
1892 {
1893 basic_block after = edge_in->dest->prev_bb;
1894 /* If this is post-bb reordering, and the edge crosses a partition
1895 boundary, the new block needs to be inserted in the bb chain
1896 at the end of the src partition (since we put the new bb into
1897 that partition, see below). Otherwise we may end up creating
1898 an extra partition crossing in the chain, which is illegal.
1899 It can't go after the src, because src may have a fall-through
1900 to a different block. */
1901 if (crtl->bb_reorder_complete
1902 && (edge_in->flags & EDGE_CROSSING))
1903 {
1904 after = last_bb_in_partition (edge_in->src);
1905 before = get_last_bb_insn (after);
1906 /* The instruction following the last bb in the partition should
1907 be a barrier, since that bb cannot end in a fall-through. */
1908 gcc_checking_assert (BARRIER_P (before));
1909 before = NEXT_INSN (before);
1910 }
1911 bb = create_basic_block (before, NULL, after);
1912 /* Put the split bb into the src partition, to avoid creating
1913 a situation where a cold bb dominates a hot bb, in the case
1914 where src is cold and dest is hot. The src will dominate
1915 the new bb (whereas it might not have dominated dest). */
1916 BB_COPY_PARTITION (bb, edge_in->src);
1917 }
1918 }
1919
1920 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1921
1922 /* Can't allow a region crossing edge to be fallthrough. */
1923 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1924 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1925 {
1926 new_bb = force_nonfallthru (single_succ_edge (bb));
1927 gcc_assert (!new_bb);
1928 }
1929
1930 /* For non-fallthru edges, we must adjust the predecessor's
1931 jump instruction to target our new block. */
1932 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1933 {
1934 edge redirected = redirect_edge_and_branch (edge_in, bb);
1935 gcc_assert (redirected);
1936 }
1937 else
1938 {
1939 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1940 {
1941 /* For asm goto, even splitting of the fallthru edge might
1942 need insn patching, as other labels might point to the
1943 old label. */
1944 rtx_insn *last = BB_END (edge_in->src);
1945 if (last
1946 && JUMP_P (last)
1947 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1948 && (extract_asm_operands (PATTERN (last))
1949 || JUMP_LABEL (last) == before)
1950 && patch_jump_insn (last, before, bb))
1951 df_set_bb_dirty (edge_in->src);
1952 }
1953 redirect_edge_succ (edge_in, bb);
1954 }
1955
1956 return bb;
1957 }
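
/* Illustrative usage sketch (hypothetical pass code, not part of this
   file): rtl_split_edge is normally reached through the generic
   split_edge hook declared in cfghooks.h, for example to give a
   critical edge a block of its own:

       if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
         {
           basic_block new_bb = split_edge (e);
           ...
         }  */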
1958
1959 /* Queue instructions for insertion on an edge between two basic blocks.
1960 The new instructions and basic blocks (if any) will not appear in the
1961 CFG until commit_edge_insertions is called. */
1962
1963 void
1964 insert_insn_on_edge (rtx pattern, edge e)
1965 {
1966 /* We cannot insert instructions on an abnormal critical edge.
1967 It will be easier to find the culprit if we die now. */
1968 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1969
1970 if (e->insns.r == NULL_RTX)
1971 start_sequence ();
1972 else
1973 push_to_sequence (e->insns.r);
1974
1975 emit_insn (pattern);
1976
1977 e->insns.r = get_insns ();
1978 end_sequence ();
1979 }
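
/* Illustrative usage sketch (hypothetical pass code; DST and SRC are
   hypothetical pseudo registers): queue a register copy on edge E,
   then commit everything queued on edges in one go:

       insert_insn_on_edge (gen_move_insn (dst, src), e);
       ...
       commit_edge_insertions ();  */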
1980
1981 /* Update the CFG for the instructions queued on edge E. */
1982
1983 void
1984 commit_one_edge_insertion (edge e)
1985 {
1986 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1987 basic_block bb;
1988
1989 /* Pull the insns off the edge now since the edge might go away. */
1990 insns = e->insns.r;
1991 e->insns.r = NULL;
1992
1993 /* Figure out where to put these insns. If the destination has
1994 one predecessor, insert there. Except for the exit block. */
1995 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1996 {
1997 bb = e->dest;
1998
1999 /* Get the location correct wrt a code label, and "nice" wrt
2000 a basic block note, and before everything else. */
2001 tmp = BB_HEAD (bb);
2002 if (LABEL_P (tmp))
2003 tmp = NEXT_INSN (tmp);
2004 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2005 tmp = NEXT_INSN (tmp);
2006 if (tmp == BB_HEAD (bb))
2007 before = tmp;
2008 else if (tmp)
2009 after = PREV_INSN (tmp);
2010 else
2011 after = get_last_insn ();
2012 }
2013
2014 /* If the source has one successor and the edge is not abnormal,
2015 insert there. Except for the entry block.
2016 Don't do this if the predecessor ends in a jump other than an
2017 unconditional simple jump. E.g. for an asm goto that points all
2018 its labels at the fallthru basic block, we can't insert instructions
2019 before the asm goto, as the asm goto can have various side effects,
2020 and can't emit instructions after the asm goto, as it must end
2021 the basic block. */
2022 else if ((e->flags & EDGE_ABNORMAL) == 0
2023 && single_succ_p (e->src)
2024 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2025 && (!JUMP_P (BB_END (e->src))
2026 || simplejump_p (BB_END (e->src))))
2027 {
2028 bb = e->src;
2029
2030 /* It is possible to have a non-simple jump here. Consider a target
2031 where some forms of unconditional jumps clobber a register. This
2032 happens on the fr30 for example.
2033
2034 We know this block has a single successor, so we can just emit
2035 the queued insns before the jump. */
2036 if (JUMP_P (BB_END (bb)))
2037 before = BB_END (bb);
2038 else
2039 {
2040 /* We'd better be fallthru, or we've lost track of what's what. */
2041 gcc_assert (e->flags & EDGE_FALLTHRU);
2042
2043 after = BB_END (bb);
2044 }
2045 }
2046
2047 /* Otherwise we must split the edge. */
2048 else
2049 {
2050 bb = split_edge (e);
2051
2052 /* If E crossed a partition boundary, we needed to make bb end in
2053 a region-crossing jump, even though it was originally fallthru. */
2054 if (JUMP_P (BB_END (bb)))
2055 before = BB_END (bb);
2056 else
2057 after = BB_END (bb);
2058 }
2059
2060 /* Now that we've found the spot, do the insertion. */
2061 if (before)
2062 {
2063 emit_insn_before_noloc (insns, before, bb);
2064 last = prev_nonnote_insn (before);
2065 }
2066 else
2067 last = emit_insn_after_noloc (insns, after, bb);
2068
2069 if (returnjump_p (last))
2070 {
2071 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2072 This is not currently a problem because this only happens
2073 for the (single) epilogue, which already has a fallthru edge
2074 to EXIT. */
2075
2076 e = single_succ_edge (bb);
2077 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2078 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2079
2080 e->flags &= ~EDGE_FALLTHRU;
2081 emit_barrier_after (last);
2082
2083 if (before)
2084 delete_insn (before);
2085 }
2086 else
2087 gcc_assert (!JUMP_P (last));
2088 }
2089
2090 /* Update the CFG for all queued instructions. */
2091
2092 void
2093 commit_edge_insertions (void)
2094 {
2095 basic_block bb;
2096
2097 /* Optimization passes that invoke this routine can cause hot blocks
2098 previously reached by both hot and cold blocks to become dominated only
2099 by cold blocks. This will cause the verification below to fail,
2100 and lead to now-cold code in the hot section. In some cases this
2101 may only be visible after newly unreachable blocks are deleted,
2102 which will be done by fixup_partitions. */
2103 fixup_partitions ();
2104
2105 if (!currently_expanding_to_rtl)
2106 checking_verify_flow_info ();
2107
2108 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2109 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2110 {
2111 edge e;
2112 edge_iterator ei;
2113
2114 FOR_EACH_EDGE (e, ei, bb->succs)
2115 if (e->insns.r)
2116 {
2117 if (currently_expanding_to_rtl)
2118 rebuild_jump_labels_chain (e->insns.r);
2119 commit_one_edge_insertion (e);
2120 }
2121 }
2122 }
2123 \f
2124
2125 /* Print out RTL-specific basic block information (live information
2126 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2127 documented in dumpfile.h. */
2128
2129 static void
2130 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2131 {
2132 char *s_indent;
2133
2134 s_indent = (char *) alloca ((size_t) indent + 1);
2135 memset (s_indent, ' ', (size_t) indent);
2136 s_indent[indent] = '\0';
2137
2138 if (df && (flags & TDF_DETAILS))
2139 {
2140 df_dump_top (bb, outf);
2141 putc ('\n', outf);
2142 }
2143
2144 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2145 {
2146 rtx_insn *last = BB_END (bb);
2147 if (last)
2148 last = NEXT_INSN (last);
2149 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2150 {
2151 if (flags & TDF_DETAILS)
2152 df_dump_insn_top (insn, outf);
2153 if (! (flags & TDF_SLIM))
2154 print_rtl_single (outf, insn);
2155 else
2156 dump_insn_slim (outf, insn);
2157 if (flags & TDF_DETAILS)
2158 df_dump_insn_bottom (insn, outf);
2159 }
2160 }
2161
2162 if (df && (flags & TDF_DETAILS))
2163 {
2164 df_dump_bottom (bb, outf);
2165 putc ('\n', outf);
2166 }
2167
2168 }
2169 \f
2170 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2171 for the start of each basic block. FLAGS are the TDF_* masks documented
2172 in dumpfile.h. */
2173
2174 void
2175 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2176 {
2177 const rtx_insn *tmp_rtx;
2178 if (rtx_first == 0)
2179 fprintf (outf, "(nil)\n");
2180 else
2181 {
2182 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2183 int max_uid = get_max_uid ();
2184 basic_block *start = XCNEWVEC (basic_block, max_uid);
2185 basic_block *end = XCNEWVEC (basic_block, max_uid);
2186 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2187 basic_block bb;
2188
2189 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2190 insns, but the CFG is not maintained so the basic block info
2191 is not reliable. Therefore it's omitted from the dumps. */
2192 if (! (cfun->curr_properties & PROP_cfg))
2193 flags &= ~TDF_BLOCKS;
2194
2195 if (df)
2196 df_dump_start (outf);
2197
2198 if (cfun->curr_properties & PROP_cfg)
2199 {
2200 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2201 {
2202 rtx_insn *x;
2203
2204 start[INSN_UID (BB_HEAD (bb))] = bb;
2205 end[INSN_UID (BB_END (bb))] = bb;
2206 if (flags & TDF_BLOCKS)
2207 {
2208 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2209 {
2210 enum bb_state state = IN_MULTIPLE_BB;
2211
2212 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2213 state = IN_ONE_BB;
2214 in_bb_p[INSN_UID (x)] = state;
2215
2216 if (x == BB_END (bb))
2217 break;
2218 }
2219 }
2220 }
2221 }
2222
2223 for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2224 {
2225 if (flags & TDF_BLOCKS)
2226 {
2227 bb = start[INSN_UID (tmp_rtx)];
2228 if (bb != NULL)
2229 {
2230 dump_bb_info (outf, bb, 0, dump_flags, true, false);
2231 if (df && (flags & TDF_DETAILS))
2232 df_dump_top (bb, outf);
2233 }
2234
2235 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2236 && !NOTE_P (tmp_rtx)
2237 && !BARRIER_P (tmp_rtx))
2238 fprintf (outf, ";; Insn is not within a basic block\n");
2239 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2240 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2241 }
2242
2243 if (flags & TDF_DETAILS)
2244 df_dump_insn_top (tmp_rtx, outf);
2245 if (! (flags & TDF_SLIM))
2246 print_rtl_single (outf, tmp_rtx);
2247 else
2248 dump_insn_slim (outf, tmp_rtx);
2249 if (flags & TDF_DETAILS)
2250 df_dump_insn_bottom (tmp_rtx, outf);
2251
2252 bb = end[INSN_UID (tmp_rtx)];
2253 if (bb != NULL)
2254 {
2255 if (flags & TDF_BLOCKS)
2256 {
2257 dump_bb_info (outf, bb, 0, dump_flags, false, true);
2258 if (df && (flags & TDF_DETAILS))
2259 df_dump_bottom (bb, outf);
2260 putc ('\n', outf);
2261 }
2262 /* Emit a hint if the fallthrough target of the current basic block
2263 isn't the one placed right after it. */
2264 else if (EDGE_COUNT (bb->succs) > 0)
2265 {
2266 gcc_assert (BB_END (bb) == tmp_rtx);
2267 const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2268 /* Bypass intervening deleted-insn notes and debug insns. */
2269 while (ninsn
2270 && !NONDEBUG_INSN_P (ninsn)
2271 && !start[INSN_UID (ninsn)])
2272 ninsn = NEXT_INSN (ninsn);
2273 edge e = find_fallthru_edge (bb->succs);
2274 if (e && ninsn)
2275 {
2276 basic_block dest = e->dest;
2277 if (start[INSN_UID (ninsn)] != dest)
2278 fprintf (outf, "%s ; pc falls through to BB %d\n",
2279 print_rtx_head, dest->index);
2280 }
2281 }
2282 }
2283 }
2284
2285 free (start);
2286 free (end);
2287 free (in_bb_p);
2288 }
2289 }
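
/* For example, the edge verifier later in this file dumps the whole
   function this way when it finds an inconsistent region-crossing
   jump:

       print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);  */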
2290 \f
2291 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2292
2293 void
2294 update_br_prob_note (basic_block bb)
2295 {
2296 rtx note;
2297 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2298 if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2299 {
2300 if (note)
2301 {
2302 rtx *note_link, this_rtx;
2303
2304 note_link = &REG_NOTES (BB_END (bb));
2305 for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2306 if (this_rtx == note)
2307 {
2308 *note_link = XEXP (this_rtx, 1);
2309 break;
2310 }
2311 }
2312 return;
2313 }
2314 if (!note
2315 || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2316 return;
2317 XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2318 }
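
/* Illustrative usage sketch (hypothetical pass code; PROB is a
   hypothetical profile_probability value): a pass that re-weights the
   out-edges of BB should keep the note in sync afterwards:

       BRANCH_EDGE (bb)->probability = prob;
       FALLTHRU_EDGE (bb)->probability = prob.invert ();
       update_br_prob_note (bb);  */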
2319
2320 /* Get the last insn associated with block BB (that includes barriers and
2321 tablejumps after BB). */
2322 rtx_insn *
2323 get_last_bb_insn (basic_block bb)
2324 {
2325 rtx_jump_table_data *table;
2326 rtx_insn *tmp;
2327 rtx_insn *end = BB_END (bb);
2328
2329 /* Include any jump table following the basic block. */
2330 if (tablejump_p (end, NULL, &table))
2331 end = table;
2332
2333 /* Include any barriers that may follow the basic block. */
2334 tmp = next_nonnote_nondebug_insn_bb (end);
2335 while (tmp && BARRIER_P (tmp))
2336 {
2337 end = tmp;
2338 tmp = next_nonnote_nondebug_insn_bb (end);
2339 }
2340
2341 return end;
2342 }
2343
2344 /* Add all BBs reachable from entry via hot paths into the SET. */
2345
2346 void
2347 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2348 {
2349 auto_vec<basic_block, 64> worklist;
2350
2351 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2352 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2353
2354 while (worklist.length () > 0)
2355 {
2356 basic_block bb = worklist.pop ();
2357 edge_iterator ei;
2358 edge e;
2359
2360 FOR_EACH_EDGE (e, ei, bb->succs)
2361 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2362 && !set->add (e->dest))
2363 worklist.safe_push (e->dest);
2364 }
2365 }
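
/* Illustrative usage sketch: find_partition_fixes below is the
   canonical consumer; a hypothetical check could use the same pattern
   to spot hot blocks reachable only through the cold partition:

       hash_set<basic_block> hot_reachable;
       find_bbs_reachable_by_hot_paths (&hot_reachable);
       FOR_EACH_BB_FN (bb, cfun)
         if (!hot_reachable.contains (bb)
             && BB_PARTITION (bb) == BB_HOT_PARTITION)
           ...;  */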
2366
2367 /* Sanity check partition hotness to ensure that basic blocks in
2368 the cold partition don't dominate basic blocks in the hot partition.
2369 If FLAG_ONLY is true, report violations as errors. Otherwise
2370 re-mark the dominated blocks as cold, since this is run after
2371 cfg optimizations that may make hot blocks previously reached
2372 by both hot and cold blocks now only reachable along cold paths. */
2373
2374 static vec<basic_block>
2375 find_partition_fixes (bool flag_only)
2376 {
2377 basic_block bb;
2378 vec<basic_block> bbs_to_fix = vNULL;
2379 hash_set<basic_block> set;
2380
2381 /* Callers check this. */
2382 gcc_checking_assert (crtl->has_bb_partition);
2383
2384 find_bbs_reachable_by_hot_paths (&set);
2385
2386 FOR_EACH_BB_FN (bb, cfun)
2387 if (!set.contains (bb)
2388 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2389 {
2390 if (flag_only)
2391 error ("non-cold basic block %d reachable only "
2392 "by paths crossing the cold partition", bb->index);
2393 else
2394 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2395 bbs_to_fix.safe_push (bb);
2396 }
2397
2398 return bbs_to_fix;
2399 }
2400
2401 /* Perform cleanup on the hot/cold bb partitioning after optimization
2402 passes that modify the cfg. */
2403
2404 void
2405 fixup_partitions (void)
2406 {
2407 basic_block bb;
2408
2409 if (!crtl->has_bb_partition)
2410 return;
2411
2412 /* Delete any blocks that became unreachable and weren't
2413 already cleaned up, for example during edge forwarding
2414 and convert_jumps_to_returns. This will expose more
2415 opportunities for fixing the partition boundaries here.
2416 Also, the calculation of the dominance graph during verification
2417 will assert if there are unreachable nodes. */
2418 delete_unreachable_blocks ();
2419
2420 /* If there are partitions, do a sanity check on them: a basic block in
2421 a cold partition cannot dominate a basic block in a hot partition.
2422 Fix up any that now violate this requirement, as a result of edge
2423 forwarding and unreachable block deletion. */
2424 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2425
2426 /* Do the partition fixup after all necessary blocks have been converted to
2427 cold, so that we only update the region crossings in the minimum number of
2428 places, which can require forcing edges to be non-fallthru. */
2429 while (! bbs_to_fix.is_empty ())
2430 {
2431 bb = bbs_to_fix.pop ();
2432 fixup_new_cold_bb (bb);
2433 }
2434 }
2435
2436 /* Verify, in the basic block chain, that there is at most one switch
2437 between hot/cold partitions. This condition will not be true until
2438 after reorder_basic_blocks is called. */
2439
2440 static int
2441 verify_hot_cold_block_grouping (void)
2442 {
2443 basic_block bb;
2444 int err = 0;
2445 bool switched_sections = false;
2446 int current_partition = BB_UNPARTITIONED;
2447
2448 /* Even after bb reordering is complete, we go into cfglayout mode
2449 again (in compgoto). Ensure we don't call this before going back
2450 into linearized RTL, when any layout fixes will have been committed. */
2451 if (!crtl->bb_reorder_complete
2452 || current_ir_type () != IR_RTL_CFGRTL)
2453 return err;
2454
2455 FOR_EACH_BB_FN (bb, cfun)
2456 {
2457 if (current_partition != BB_UNPARTITIONED
2458 && BB_PARTITION (bb) != current_partition)
2459 {
2460 if (switched_sections)
2461 {
2462 error ("multiple hot/cold transitions found (bb %i)",
2463 bb->index);
2464 err = 1;
2465 }
2466 else
2467 switched_sections = true;
2468
2469 if (!crtl->has_bb_partition)
2470 error ("partition found but function partition flag not set");
2471 }
2472 current_partition = BB_PARTITION (bb);
2473 }
2474
2475 return err;
2476 }
2477 \f
2478
2479 /* Perform several checks on the edges out of each block, such as
2480 the consistency of the branch probabilities, the correctness
2481 of hot/cold partition crossing edges, and the number of expected
2482 successor edges. Also verify that the dominance relationship
2483 between hot/cold blocks is sane. */
2484
2485 static int
2486 rtl_verify_edges (void)
2487 {
2488 int err = 0;
2489 basic_block bb;
2490
2491 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2492 {
2493 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2494 int n_eh = 0, n_abnormal = 0;
2495 edge e, fallthru = NULL;
2496 edge_iterator ei;
2497 rtx note;
2498 bool has_crossing_edge = false;
2499
2500 if (JUMP_P (BB_END (bb))
2501 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2502 && EDGE_COUNT (bb->succs) >= 2
2503 && any_condjump_p (BB_END (bb)))
2504 {
2505 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2506 {
2507 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2508 {
2509 error ("verify_flow_info: "
2510 "REG_BR_PROB is set but cfg probability is not");
2511 err = 1;
2512 }
2513 }
2514 else if (XINT (note, 0)
2515 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2516 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2517 {
2518 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2519 XINT (note, 0),
2520 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2521 err = 1;
2522 }
2523 }
2524
2525 FOR_EACH_EDGE (e, ei, bb->succs)
2526 {
2527 bool is_crossing;
2528
2529 if (e->flags & EDGE_FALLTHRU)
2530 n_fallthru++, fallthru = e;
2531
2532 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2533 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2534 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2535 has_crossing_edge |= is_crossing;
2536 if (e->flags & EDGE_CROSSING)
2537 {
2538 if (!is_crossing)
2539 {
2540 error ("EDGE_CROSSING incorrectly set across same section");
2541 err = 1;
2542 }
2543 if (e->flags & EDGE_FALLTHRU)
2544 {
2545 error ("fallthru edge crosses section boundary in bb %i",
2546 e->src->index);
2547 err = 1;
2548 }
2549 if (e->flags & EDGE_EH)
2550 {
2551 error ("EH edge crosses section boundary in bb %i",
2552 e->src->index);
2553 err = 1;
2554 }
2555 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2556 {
2557 error ("No region crossing jump at section boundary in bb %i",
2558 bb->index);
2559 err = 1;
2560 }
2561 }
2562 else if (is_crossing)
2563 {
2564 error ("EDGE_CROSSING missing across section boundary");
2565 err = 1;
2566 }
2567
2568 if ((e->flags & ~(EDGE_DFS_BACK
2569 | EDGE_CAN_FALLTHRU
2570 | EDGE_IRREDUCIBLE_LOOP
2571 | EDGE_LOOP_EXIT
2572 | EDGE_CROSSING
2573 | EDGE_PRESERVE)) == 0)
2574 n_branch++;
2575
2576 if (e->flags & EDGE_ABNORMAL_CALL)
2577 n_abnormal_call++;
2578
2579 if (e->flags & EDGE_SIBCALL)
2580 n_sibcall++;
2581
2582 if (e->flags & EDGE_EH)
2583 n_eh++;
2584
2585 if (e->flags & EDGE_ABNORMAL)
2586 n_abnormal++;
2587 }
2588
2589 if (!has_crossing_edge
2590 && JUMP_P (BB_END (bb))
2591 && CROSSING_JUMP_P (BB_END (bb)))
2592 {
2593 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2594 error ("Region crossing jump across same section in bb %i",
2595 bb->index);
2596 err = 1;
2597 }
2598
2599 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2600 {
2601 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2602 err = 1;
2603 }
2604 if (n_eh > 1)
2605 {
2606 error ("too many exception handling edges in bb %i", bb->index);
2607 err = 1;
2608 }
2609 if (n_branch
2610 && (!JUMP_P (BB_END (bb))
2611 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2612 || any_condjump_p (BB_END (bb))))))
2613 {
2614 error ("too many outgoing branch edges from bb %i", bb->index);
2615 err = 1;
2616 }
2617 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2618 {
2619 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2620 err = 1;
2621 }
2622 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2623 {
2624 error ("wrong number of branch edges after unconditional jump"
2625 " in bb %i", bb->index);
2626 err = 1;
2627 }
2628 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2629 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2630 {
2631 error ("wrong amount of branch edges after conditional jump"
2632 " in bb %i", bb->index);
2633 err = 1;
2634 }
2635 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2636 {
2637 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2638 err = 1;
2639 }
2640 if (n_sibcall && !CALL_P (BB_END (bb)))
2641 {
2642 error ("sibcall edges for non-call insn in bb %i", bb->index);
2643 err = 1;
2644 }
2645 if (n_abnormal > n_eh
2646 && !(CALL_P (BB_END (bb))
2647 && n_abnormal == n_abnormal_call + n_sibcall)
2648 && (!JUMP_P (BB_END (bb))
2649 || any_condjump_p (BB_END (bb))
2650 || any_uncondjump_p (BB_END (bb))))
2651 {
2652 error ("abnormal edges for no purpose in bb %i", bb->index);
2653 err = 1;
2654 }
2655
2656 int has_eh = -1;
2657 FOR_EACH_EDGE (e, ei, bb->preds)
2658 {
2659 if (has_eh == -1)
2660 has_eh = (e->flags & EDGE_EH);
2661 if ((e->flags & EDGE_EH) == has_eh)
2662 continue;
2663 error ("EH incoming edge mixed with non-EH incoming edges "
2664 "in bb %i", bb->index);
2665 err = 1;
2666 break;
2667 }
2668 }
2669
2670 /* If there are partitions, do a sanity check on them: a basic block in
2671 a cold partition cannot dominate a basic block in a hot partition. */
2672 if (crtl->has_bb_partition && !err
2673 && current_ir_type () == IR_RTL_CFGLAYOUT)
2674 {
2675 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2676 err = !bbs_to_fix.is_empty ();
2677 }
2678
2679 /* Clean up. */
2680 return err;
2681 }
2682
2683 /* Checks on the instructions within blocks. Currently checks that each
2684 block starts with a basic block note, and that basic block notes and
2685 control flow jumps are not found in the middle of the block. */
2686
2687 static int
2688 rtl_verify_bb_insns (void)
2689 {
2690 rtx_insn *x;
2691 int err = 0;
2692 basic_block bb;
2693
2694 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2695 {
2696 /* Now check the header of the basic
2697 block. It ought to contain an optional CODE_LABEL followed
2698 by the NOTE_BASIC_BLOCK note. */
2699 x = BB_HEAD (bb);
2700 if (LABEL_P (x))
2701 {
2702 if (BB_END (bb) == x)
2703 {
2704 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2705 bb->index);
2706 err = 1;
2707 }
2708
2709 x = NEXT_INSN (x);
2710 }
2711
2712 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2713 {
2714 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2715 bb->index);
2716 err = 1;
2717 }
2718
2719 if (BB_END (bb) == x)
2720 /* Do checks for empty blocks here. */
2721 ;
2722 else
2723 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2724 {
2725 if (NOTE_INSN_BASIC_BLOCK_P (x))
2726 {
2727 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2728 INSN_UID (x), bb->index);
2729 err = 1;
2730 }
2731
2732 if (x == BB_END (bb))
2733 break;
2734
2735 if (control_flow_insn_p (x))
2736 {
2737 error ("in basic block %d:", bb->index);
2738 fatal_insn ("flow control insn inside a basic block", x);
2739 }
2740 }
2741 }
2742
2743 /* Clean up. */
2744 return err;
2745 }
2746
2747 /* Verify that block pointers for instructions in basic blocks, headers and
2748 footers are set appropriately. */
2749
2750 static int
2751 rtl_verify_bb_pointers (void)
2752 {
2753 int err = 0;
2754 basic_block bb;
2755
2756 /* Check the general integrity of the basic blocks. */
2757 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2758 {
2759 rtx_insn *insn;
2760
2761 if (!(bb->flags & BB_RTL))
2762 {
2763 error ("BB_RTL flag not set for block %d", bb->index);
2764 err = 1;
2765 }
2766
2767 FOR_BB_INSNS (bb, insn)
2768 if (BLOCK_FOR_INSN (insn) != bb)
2769 {
2770 error ("insn %d basic block pointer is %d, should be %d",
2771 INSN_UID (insn),
2772 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2773 bb->index);
2774 err = 1;
2775 }
2776
2777 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2778 if (!BARRIER_P (insn)
2779 && BLOCK_FOR_INSN (insn) != NULL)
2780 {
2781 error ("insn %d in header of bb %d has non-NULL basic block",
2782 INSN_UID (insn), bb->index);
2783 err = 1;
2784 }
2785 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2786 if (!BARRIER_P (insn)
2787 && BLOCK_FOR_INSN (insn) != NULL)
2788 {
2789 error ("insn %d in footer of bb %d has non-NULL basic block",
2790 INSN_UID (insn), bb->index);
2791 err = 1;
2792 }
2793 }
2794
2795 /* Clean up. */
2796 return err;
2797 }
2798
2799 /* Verify the CFG and RTL consistency common to both underlying RTL and
2800 cfglayout RTL.
2801
2802 Currently it does the following checks:
2803
2804 - overlapping of basic blocks
2805 - insns with wrong BLOCK_FOR_INSN pointers
2806 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2807 - tails of basic blocks (ensure that boundary is necessary)
2808 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2809 and NOTE_INSN_BASIC_BLOCK
2810 - verify that no fall_thru edge crosses hot/cold partition boundaries
2811 - verify that there are no pending RTL branch predictions
2812 - verify that hot blocks are not dominated by cold blocks
2813
2814 In the future it can be extended to check a lot of other stuff as well
2815 (reachability of basic blocks, life information, etc.). */
2816
2817 static int
2818 rtl_verify_flow_info_1 (void)
2819 {
2820 int err = 0;
2821
2822 err |= rtl_verify_bb_pointers ();
2823
2824 err |= rtl_verify_bb_insns ();
2825
2826 err |= rtl_verify_edges ();
2827
2828 return err;
2829 }
2830
2831 /* Walk the instruction chain and verify that bb head/end pointers
2832 are correct, and that instructions are in exactly one bb and have
2833 correct block pointers. */
2834
2835 static int
2836 rtl_verify_bb_insn_chain (void)
2837 {
2838 basic_block bb;
2839 int err = 0;
2840 rtx_insn *x;
2841 rtx_insn *last_head = get_last_insn ();
2842 basic_block *bb_info;
2843 const int max_uid = get_max_uid ();
2844
2845 bb_info = XCNEWVEC (basic_block, max_uid);
2846
2847 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2848 {
2849 rtx_insn *head = BB_HEAD (bb);
2850 rtx_insn *end = BB_END (bb);
2851
2852 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2853 {
2854 /* Verify the end of the basic block is in the INSN chain. */
2855 if (x == end)
2856 break;
2857
2858 /* And that the code outside of basic blocks has NULL bb field. */
2859 if (!BARRIER_P (x)
2860 && BLOCK_FOR_INSN (x) != NULL)
2861 {
2862 error ("insn %d outside of basic blocks has non-NULL bb field",
2863 INSN_UID (x));
2864 err = 1;
2865 }
2866 }
2867
2868 if (!x)
2869 {
2870 error ("end insn %d for block %d not found in the insn stream",
2871 INSN_UID (end), bb->index);
2872 err = 1;
2873 }
2874
2875 /* Work backwards from the end to the head of the basic block
2876 to verify the head is in the RTL chain. */
2877 for (; x != NULL_RTX; x = PREV_INSN (x))
2878 {
2879 /* While walking over the insn chain, verify insns appear
2880 in only one basic block. */
2881 if (bb_info[INSN_UID (x)] != NULL)
2882 {
2883 error ("insn %d is in multiple basic blocks (%d and %d)",
2884 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2885 err = 1;
2886 }
2887
2888 bb_info[INSN_UID (x)] = bb;
2889
2890 if (x == head)
2891 break;
2892 }
2893 if (!x)
2894 {
2895 error ("head insn %d for block %d not found in the insn stream",
2896 INSN_UID (head), bb->index);
2897 err = 1;
2898 }
2899
2900 last_head = PREV_INSN (x);
2901 }
2902
2903 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2904 {
2905 /* Check that the code before the first basic block has NULL
2906 bb field. */
2907 if (!BARRIER_P (x)
2908 && BLOCK_FOR_INSN (x) != NULL)
2909 {
2910 error ("insn %d outside of basic blocks has non-NULL bb field",
2911 INSN_UID (x));
2912 err = 1;
2913 }
2914 }
2915 free (bb_info);
2916
2917 return err;
2918 }
2919
2920 /* Verify that fallthru edges point to adjacent blocks in layout order and
2921 that barriers exist after non-fallthru blocks. */
2922
2923 static int
2924 rtl_verify_fallthru (void)
2925 {
2926 basic_block bb;
2927 int err = 0;
2928
2929 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2930 {
2931 edge e;
2932
2933 e = find_fallthru_edge (bb->succs);
2934 if (!e)
2935 {
2936 rtx_insn *insn;
2937
2938 /* Ensure existence of barrier in BB with no fallthru edges. */
2939 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2940 {
2941 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2942 {
2943 error ("missing barrier after block %i", bb->index);
2944 err = 1;
2945 break;
2946 }
2947 if (BARRIER_P (insn))
2948 break;
2949 }
2950 }
2951 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2952 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2953 {
2954 rtx_insn *insn;
2955
2956 if (e->src->next_bb != e->dest)
2957 {
2958 error
2959 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2960 e->src->index, e->dest->index);
2961 err = 1;
2962 }
2963 else
2964 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2965 insn = NEXT_INSN (insn))
2966 if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
2967 {
2968 error ("verify_flow_info: Incorrect fallthru %i->%i",
2969 e->src->index, e->dest->index);
2970 fatal_insn ("wrong insn in the fallthru edge", insn);
2971 err = 1;
2972 }
2973 }
2974 }
2975
2976 return err;
2977 }
2978
2979 /* Verify that blocks are laid out in consecutive order. While walking the
2980 instructions, verify that all expected instructions are inside the basic
2981 blocks, and that all returns are followed by barriers. */
2982
2983 static int
2984 rtl_verify_bb_layout (void)
2985 {
2986 basic_block bb;
2987 int err = 0;
2988 rtx_insn *x, *y;
2989 int num_bb_notes;
2990 rtx_insn * const rtx_first = get_insns ();
2991 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
2992
2993 num_bb_notes = 0;
2994
2995 for (x = rtx_first; x; x = NEXT_INSN (x))
2996 {
2997 if (NOTE_INSN_BASIC_BLOCK_P (x))
2998 {
2999 bb = NOTE_BASIC_BLOCK (x);
3000
3001 num_bb_notes++;
3002 if (bb != last_bb_seen->next_bb)
3003 internal_error ("basic blocks not laid down consecutively");
3004
3005 curr_bb = last_bb_seen = bb;
3006 }
3007
3008 if (!curr_bb)
3009 {
3010 switch (GET_CODE (x))
3011 {
3012 case BARRIER:
3013 case NOTE:
3014 break;
3015
3016 case CODE_LABEL:
3017 /* An ADDR_VEC is placed outside any basic block. */
3018 if (NEXT_INSN (x)
3019 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3020 x = NEXT_INSN (x);
3021
3022 /* But in any case, non-deletable labels can appear anywhere. */
3023 break;
3024
3025 default:
3026 fatal_insn ("insn outside basic block", x);
3027 }
3028 }
3029
3030 if (JUMP_P (x)
3031 && returnjump_p (x) && ! condjump_p (x)
3032 && ! ((y = next_nonnote_nondebug_insn (x))
3033 && BARRIER_P (y)))
3034 fatal_insn ("return not followed by barrier", x);
3035
3036 if (curr_bb && x == BB_END (curr_bb))
3037 curr_bb = NULL;
3038 }
3039
3040 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3041 internal_error
3042 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3043 num_bb_notes, n_basic_blocks_for_fn (cfun));
3044
3045 return err;
3046 }
3047
3048 /* Verify the CFG and RTL consistency common to both underlying RTL and
3049 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3050
3051 Currently it does the following checks:
3052 - all checks of rtl_verify_flow_info_1
3053 - test head/end pointers
3054 - check that blocks are laid out in consecutive order
3055 - check that all insns are in the basic blocks
3056 (except the switch handling code, barriers and notes)
3057 - check that all returns are followed by barriers
3058 - check that all fallthru edges point to the adjacent blocks
3059 - verify that there is a single hot/cold partition boundary after bbro */
3060
3061 static int
3062 rtl_verify_flow_info (void)
3063 {
3064 int err = 0;
3065
3066 err |= rtl_verify_flow_info_1 ();
3067
3068 err |= rtl_verify_bb_insn_chain ();
3069
3070 err |= rtl_verify_fallthru ();
3071
3072 err |= rtl_verify_bb_layout ();
3073
3074 err |= verify_hot_cold_block_grouping ();
3075
3076 return err;
3077 }
3078 \f
3079 /* Assume that the preceding pass has possibly eliminated jump instructions
3080 or converted the unconditional jumps. Eliminate the edges from the CFG.
3081 Return true if any edges are eliminated. */
3082
3083 bool
3084 purge_dead_edges (basic_block bb)
3085 {
3086 edge e;
3087 rtx_insn *insn = BB_END (bb);
3088 rtx note;
3089 bool purged = false;
3090 bool found;
3091 edge_iterator ei;
3092
3093 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3094 do
3095 insn = PREV_INSN (insn);
3096 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3097
3098 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3099 if (NONJUMP_INSN_P (insn)
3100 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3101 {
3102 rtx eqnote;
3103
3104 if (! may_trap_p (PATTERN (insn))
3105 || ((eqnote = find_reg_equal_equiv_note (insn))
3106 && ! may_trap_p (XEXP (eqnote, 0))))
3107 remove_note (insn, note);
3108 }
3109
3110 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3111 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3112 {
3113 bool remove = false;
3114
3115 /* There are three types of edges we need to handle correctly here: EH
3116 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3117 last of these can appear when nonlocal gotos are used. */
3118 if (e->flags & EDGE_ABNORMAL_CALL)
3119 {
3120 if (!CALL_P (insn))
3121 remove = true;
3122 else if (can_nonlocal_goto (insn))
3123 ;
3124 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3125 ;
3126 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3127 ;
3128 else
3129 remove = true;
3130 }
3131 else if (e->flags & EDGE_EH)
3132 remove = !can_throw_internal (insn);
3133
3134 if (remove)
3135 {
3136 remove_edge (e);
3137 df_set_bb_dirty (bb);
3138 purged = true;
3139 }
3140 else
3141 ei_next (&ei);
3142 }
3143
3144 if (JUMP_P (insn))
3145 {
3146 rtx note;
3147 edge b,f;
3148 edge_iterator ei;
3149
3150 /* We care only about conditional jumps, return jumps and simplejumps. */
3151 if (!any_condjump_p (insn)
3152 && !returnjump_p (insn)
3153 && !simplejump_p (insn))
3154 return purged;
3155
3156 /* Branch probability/prediction notes are defined only for
3157 condjumps. We've possibly turned a condjump into a simplejump. */
3158 if (simplejump_p (insn))
3159 {
3160 note = find_reg_note (insn, REG_BR_PROB, NULL);
3161 if (note)
3162 remove_note (insn, note);
3163 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3164 remove_note (insn, note);
3165 }
3166
3167 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3168 {
3169 /* Don't let abnormal flags leak from computed jumps turned
3170 into simplejumps. */
3171
3172 e->flags &= ~EDGE_ABNORMAL;
3173
3174 /* See if this edge is one we should keep. */
3175 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3176 /* A conditional jump can fall through into the next
3177 block, so we should keep the edge. */
3178 {
3179 ei_next (&ei);
3180 continue;
3181 }
3182 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3183 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3184 /* If the destination block is the target of the jump,
3185 keep the edge. */
3186 {
3187 ei_next (&ei);
3188 continue;
3189 }
3190 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3191 && returnjump_p (insn))
3192 /* If the destination block is the exit block, and this
3193 instruction is a return, then keep the edge. */
3194 {
3195 ei_next (&ei);
3196 continue;
3197 }
3198 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3199 /* Keep the edges that correspond to exceptions thrown by
3200 this instruction and rematerialize the EDGE_ABNORMAL
3201 flag we just cleared above. */
3202 {
3203 e->flags |= EDGE_ABNORMAL;
3204 ei_next (&ei);
3205 continue;
3206 }
3207
3208 /* We do not need this edge. */
3209 df_set_bb_dirty (bb);
3210 purged = true;
3211 remove_edge (e);
3212 }
3213
3214 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3215 return purged;
3216
3217 if (dump_file)
3218 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3219
3220 if (!optimize)
3221 return purged;
3222
3223 /* Redistribute probabilities. */
3224 if (single_succ_p (bb))
3225 {
3226 single_succ_edge (bb)->probability = profile_probability::always ();
3227 }
3228 else
3229 {
3230 note = find_reg_note (insn, REG_BR_PROB, NULL);
3231 if (!note)
3232 return purged;
3233
3234 b = BRANCH_EDGE (bb);
3235 f = FALLTHRU_EDGE (bb);
3236 b->probability = profile_probability::from_reg_br_prob_note
3237 (XINT (note, 0));
3238 f->probability = b->probability.invert ();
3239 }
3240
3241 return purged;
3242 }
3243 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3244 {
3245 /* First, there should not be any EH or ABCALL edges resulting
3246 from non-local gotos and the like. If there were, we shouldn't
3247 have created the sibcall in the first place. Second, there
3248 should of course never have been a fallthru edge. */
3249 gcc_assert (single_succ_p (bb));
3250 gcc_assert (single_succ_edge (bb)->flags
3251 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3252
3253 return false;
3254 }
3255
3256 /* If we don't see a jump insn, we don't know exactly why the block would
3257 have been broken at this point. Look for a simple, non-fallthru edge,
3258 as these are only created by conditional branches. If we find such an
3259 edge we know that there used to be a jump here and can then safely
3260 remove all non-fallthru edges. */
3261 found = false;
3262 FOR_EACH_EDGE (e, ei, bb->succs)
3263 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3264 {
3265 found = true;
3266 break;
3267 }
3268
3269 if (!found)
3270 return purged;
3271
3272 /* Remove all but the fake and fallthru edges. The fake edge may be
3273 the only successor for this block in the case of noreturn
3274 calls. */
3275 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3276 {
3277 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3278 {
3279 df_set_bb_dirty (bb);
3280 remove_edge (e);
3281 purged = true;
3282 }
3283 else
3284 ei_next (&ei);
3285 }
3286
3287 gcc_assert (single_succ_p (bb));
3288
3289 single_succ_edge (bb)->probability = profile_probability::always ();
3290
3291 if (dump_file)
3292 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3293 bb->index);
3294 return purged;
3295 }
3296
3297 /* Search all basic blocks for potentially dead edges and purge them. Return
3298 true if some edge has been eliminated. */
3299
3300 bool
3301 purge_all_dead_edges (void)
3302 {
3303 bool purged = false;
3304 basic_block bb;
3305
3306 FOR_EACH_BB_FN (bb, cfun)
3307 {
3308 bool purged_here = purge_dead_edges (bb);
3309
3310 purged |= purged_here;
3311 }
3312
3313 return purged;
3314 }
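
/* Illustrative usage sketch (hypothetical caller): after a pass has
   simplified or deleted jump insns, the stale edges are purged and,
   if anything changed, the CFG is cleaned up:

       if (purge_all_dead_edges ())
         cleanup_cfg (0);  */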
3315
3316 /* This is used by a few passes that emit some instructions after abnormal
3317 calls, moving the basic block's end, while they in fact do want to emit
3318 them on the fallthru edge. Look for abnormal call edges, search backward
3319 for the call in the block, and insert the instructions on the edge instead.
3320
3321 Similarly, handle instructions throwing exceptions internally.
3322
3323 Return true when instructions have been found and inserted on edges. */
3324
3325 bool
3326 fixup_abnormal_edges (void)
3327 {
3328 bool inserted = false;
3329 basic_block bb;
3330
3331 FOR_EACH_BB_FN (bb, cfun)
3332 {
3333 edge e;
3334 edge_iterator ei;
3335
3336 /* Look for cases we are interested in - calls or instructions causing
3337 exceptions. */
3338 FOR_EACH_EDGE (e, ei, bb->succs)
3339 if ((e->flags & EDGE_ABNORMAL_CALL)
3340 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3341 == (EDGE_ABNORMAL | EDGE_EH)))
3342 break;
3343
3344 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3345 {
3346 rtx_insn *insn;
3347
3348 /* Get past the new insns generated. Allow notes, as the insns
3349 may already be deleted. */
3350 insn = BB_END (bb);
3351 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3352 && !can_throw_internal (insn)
3353 && insn != BB_HEAD (bb))
3354 insn = PREV_INSN (insn);
3355
3356 if (CALL_P (insn) || can_throw_internal (insn))
3357 {
3358 rtx_insn *stop, *next;
3359
3360 e = find_fallthru_edge (bb->succs);
3361
3362 stop = NEXT_INSN (BB_END (bb));
3363 BB_END (bb) = insn;
3364
3365 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3366 {
3367 next = NEXT_INSN (insn);
3368 if (INSN_P (insn))
3369 {
3370 delete_insn (insn);
3371
3372 /* Sometimes there's still the return value USE.
3373 If it's placed after a trapping call (i.e. that
3374 call is the last insn anyway), we have no fallthru
3375 edge. Simply delete this use and don't try to insert
3376 on the non-existent edge.
3377 Similarly, sometimes a call that can throw is
3378 followed in the source with __builtin_unreachable (),
3379 meaning that there is UB if the call returns rather
3380 than throws. If there weren't any instructions
3381 following such calls before, supposedly even the ones
3382 we've deleted aren't significant and can be
3383 removed. */
3384 if (e)
3385 {
3386 /* We're not deleting it, we're moving it. */
3387 insn->set_undeleted ();
3388 SET_PREV_INSN (insn) = NULL_RTX;
3389 SET_NEXT_INSN (insn) = NULL_RTX;
3390
3391 insert_insn_on_edge (insn, e);
3392 inserted = true;
3393 }
3394 }
3395 else if (!BARRIER_P (insn))
3396 set_block_for_insn (insn, NULL);
3397 }
3398 }
3399
3400 /* It may be that we don't find any trapping insn. In this
3401 case we discovered quite late that the insn that had been
3402 marked as can_throw_internal in fact couldn't trap at all.
3403 So we should delete the EH edges out of the block. */
3404 else
3405 purge_dead_edges (bb);
3406 }
3407 }
3408
3409 return inserted;
3410 }
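
/* Illustrative usage sketch (hypothetical caller): the insns moved
   onto edges above are only queued, so a caller still has to commit
   them:

       if (fixup_abnormal_edges ())
         commit_edge_insertions ();  */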
3411 \f
3412 /* Cut the insns from FIRST to LAST out of the insn stream. */
3413
3414 rtx_insn *
3415 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3416 {
3417 rtx_insn *prevfirst = PREV_INSN (first);
3418 rtx_insn *nextlast = NEXT_INSN (last);
3419
3420 SET_PREV_INSN (first) = NULL;
3421 SET_NEXT_INSN (last) = NULL;
3422 if (prevfirst)
3423 SET_NEXT_INSN (prevfirst) = nextlast;
3424 if (nextlast)
3425 SET_PREV_INSN (nextlast) = prevfirst;
3426 else
3427 set_last_insn (prevfirst);
3428 if (!prevfirst)
3429 set_first_insn (nextlast);
3430 return first;
3431 }
3432 \f
3433 /* Skip over inter-block insns occurring after BB which are typically
3434 associated with BB (e.g., barriers). If there are any such insns,
3435 we return the last one. Otherwise, we return the end of BB. */
3436
3437 static rtx_insn *
3438 skip_insns_after_block (basic_block bb)
3439 {
3440 rtx_insn *insn, *last_insn, *next_head, *prev;
3441
3442 next_head = NULL;
3443 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3444 next_head = BB_HEAD (bb->next_bb);
3445
3446 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3447 {
3448 if (insn == next_head)
3449 break;
3450
3451 switch (GET_CODE (insn))
3452 {
3453 case BARRIER:
3454 last_insn = insn;
3455 continue;
3456
3457 case NOTE:
3458 switch (NOTE_KIND (insn))
3459 {
3460 case NOTE_INSN_BLOCK_END:
3461 gcc_unreachable ();
3462 continue;
3463 default:
3464 continue;
3466 }
3467 break;
3468
3469 case CODE_LABEL:
3470 if (NEXT_INSN (insn)
3471 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3472 {
3473 insn = NEXT_INSN (insn);
3474 last_insn = insn;
3475 continue;
3476 }
3477 break;
3478
3479 default:
3480 break;
3481 }
3482
3483 break;
3484 }
3485
3486 /* It is possible to hit a contradictory sequence. For instance:
3487 
3488 jump_insn
3489 NOTE_INSN_BLOCK_BEG
3490 barrier
3491 
3492 where the barrier belongs to jump_insn, but the note does not. This can be
3493 created by removing the basic block originally following
3494 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
3495
3496 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3497 {
3498 prev = PREV_INSN (insn);
3499 if (NOTE_P (insn))
3500 switch (NOTE_KIND (insn))
3501 {
3502 case NOTE_INSN_BLOCK_END:
3503 gcc_unreachable ();
3504 break;
3505 case NOTE_INSN_DELETED:
3506 case NOTE_INSN_DELETED_LABEL:
3507 case NOTE_INSN_DELETED_DEBUG_LABEL:
3508 continue;
3509 default:
3510 reorder_insns (insn, insn, last_insn);
3511 }
3512 }
3513
3514 return last_insn;
3515 }
3516
3517 /* Locate or create a label for a given basic block. */
3518
3519 static rtx_insn *
3520 label_for_bb (basic_block bb)
3521 {
3522 rtx_insn *label = BB_HEAD (bb);
3523
3524 if (!LABEL_P (label))
3525 {
3526 if (dump_file)
3527 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3528
3529 label = block_label (bb);
3530 }
3531
3532 return label;
3533 }
3534
3535 /* Locate the effective beginning and end of the insn chain for each
3536 block, as defined by skip_insns_after_block above. */
3537
3538 static void
3539 record_effective_endpoints (void)
3540 {
3541 rtx_insn *next_insn;
3542 basic_block bb;
3543 rtx_insn *insn;
3544
3545 for (insn = get_insns ();
3546 insn
3547 && NOTE_P (insn)
3548 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3549 insn = NEXT_INSN (insn))
3550 continue;
3551 /* No basic blocks at all? */
3552 gcc_assert (insn);
3553
3554 if (PREV_INSN (insn))
3555 cfg_layout_function_header =
3556 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3557 else
3558 cfg_layout_function_header = NULL;
3559
3560 next_insn = get_insns ();
3561 FOR_EACH_BB_FN (bb, cfun)
3562 {
3563 rtx_insn *end;
3564
3565 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3566 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3567 PREV_INSN (BB_HEAD (bb)));
3568 end = skip_insns_after_block (bb);
3569 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3570 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3571 next_insn = NEXT_INSN (BB_END (bb));
3572 }
3573
3574 cfg_layout_function_footer = next_insn;
3575 if (cfg_layout_function_footer)
3576 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3577 }
3578 \f
3579 namespace {
3580
3581 const pass_data pass_data_into_cfg_layout_mode =
3582 {
3583 RTL_PASS, /* type */
3584 "into_cfglayout", /* name */
3585 OPTGROUP_NONE, /* optinfo_flags */
3586 TV_CFG, /* tv_id */
3587 0, /* properties_required */
3588 PROP_cfglayout, /* properties_provided */
3589 0, /* properties_destroyed */
3590 0, /* todo_flags_start */
3591 0, /* todo_flags_finish */
3592 };
3593
3594 class pass_into_cfg_layout_mode : public rtl_opt_pass
3595 {
3596 public:
3597 pass_into_cfg_layout_mode (gcc::context *ctxt)
3598 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3599 {}
3600
3601 /* opt_pass methods: */
3602 virtual unsigned int execute (function *)
3603 {
3604 cfg_layout_initialize (0);
3605 return 0;
3606 }
3607
3608 }; // class pass_into_cfg_layout_mode
3609
3610 } // anon namespace
3611
3612 rtl_opt_pass *
3613 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3614 {
3615 return new pass_into_cfg_layout_mode (ctxt);
3616 }
3617
3618 namespace {
3619
3620 const pass_data pass_data_outof_cfg_layout_mode =
3621 {
3622 RTL_PASS, /* type */
3623 "outof_cfglayout", /* name */
3624 OPTGROUP_NONE, /* optinfo_flags */
3625 TV_CFG, /* tv_id */
3626 0, /* properties_required */
3627 0, /* properties_provided */
3628 PROP_cfglayout, /* properties_destroyed */
3629 0, /* todo_flags_start */
3630 0, /* todo_flags_finish */
3631 };
3632
3633 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3634 {
3635 public:
3636 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3637 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3638 {}
3639
3640 /* opt_pass methods: */
3641 virtual unsigned int execute (function *);
3642
3643 }; // class pass_outof_cfg_layout_mode
3644
3645 unsigned int
3646 pass_outof_cfg_layout_mode::execute (function *fun)
3647 {
3648 basic_block bb;
3649
3650 FOR_EACH_BB_FN (bb, fun)
3651 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3652 bb->aux = bb->next_bb;
3653
3654 cfg_layout_finalize ();
3655
3656 return 0;
3657 }
3658
3659 } // anon namespace
3660
3661 rtl_opt_pass *
3662 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3663 {
3664 return new pass_outof_cfg_layout_mode (ctxt);
3665 }
3666 \f
3667
3668 /* Link the basic blocks in the correct order, compacting the basic
3669 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3670 function also clears the basic block header and footer fields.
3671
3672 This function is usually called after a pass (e.g. tracer) finishes
3673 some transformations while in cfglayout mode. The required sequence
3674 of the basic blocks is in a linked list along the bb->aux field.
3675 This function re-links the basic block prev_bb and next_bb pointers
3676 accordingly, and it compacts and renumbers the blocks.
3677
3678 FIXME: This currently works only for RTL, but the only RTL-specific
3679 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3680 to GIMPLE a long time ago, but it doesn't relink the basic block
3681 chain. It could do that (to give better initial RTL) if this function
3682 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3683
3684 void
3685 relink_block_chain (bool stay_in_cfglayout_mode)
3686 {
3687 basic_block bb, prev_bb;
3688 int index;
3689
3690 /* Maybe dump the re-ordered sequence. */
3691 if (dump_file)
3692 {
3693 fprintf (dump_file, "Reordered sequence:\n");
3694 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3695 NUM_FIXED_BLOCKS;
3696 bb;
3697 bb = (basic_block) bb->aux, index++)
3698 {
3699 fprintf (dump_file, " %i ", index);
3700 if (get_bb_original (bb))
3701 fprintf (dump_file, "duplicate of %i\n",
3702 get_bb_original (bb)->index);
3703 else if (forwarder_block_p (bb)
3704 && !LABEL_P (BB_HEAD (bb)))
3705 fprintf (dump_file, "compensation\n");
3706 else
3707 fprintf (dump_file, "bb %i\n", bb->index);
3708 }
3709 }
3710
3711 /* Now reorder the blocks. */
3712 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3713 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3714 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3715 {
3716 bb->prev_bb = prev_bb;
3717 prev_bb->next_bb = bb;
3718 }
3719 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3720 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3721
3722 /* Then, clean up the aux fields. */
3723 FOR_ALL_BB_FN (bb, cfun)
3724 {
3725 bb->aux = NULL;
3726 if (!stay_in_cfglayout_mode)
3727 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3728 }
3729
3730 /* Maybe reset the original copy tables, they are not valid anymore
3731 when we renumber the basic blocks in compact_blocks. If we
3732 are going out of cfglayout mode, don't re-allocate the tables. */
3733 if (original_copy_tables_initialized_p ())
3734 free_original_copy_tables ();
3735 if (stay_in_cfglayout_mode)
3736 initialize_original_copy_tables ();
3737
3738 /* Finally, put basic_block_info in the new order. */
3739 compact_blocks ();
3740 }
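
/* Illustrative usage sketch (hypothetical cfglayout pass code): a pass
   threads its desired block order through the aux fields and then
   commits it, staying in cfglayout mode:

       basic_block bb;
       FOR_EACH_BB_FN (bb, cfun)
         bb->aux = (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
                    ? bb->next_bb : NULL);
       relink_block_chain (true);  */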
3741 \f
3742
3743 /* Given a reorder chain, rearrange the code to match. */
3744
3745 static void
3746 fixup_reorder_chain (void)
3747 {
3748 basic_block bb;
3749 rtx_insn *insn = NULL;
3750
3751 if (cfg_layout_function_header)
3752 {
3753 set_first_insn (cfg_layout_function_header);
3754 insn = cfg_layout_function_header;
3755 while (NEXT_INSN (insn))
3756 insn = NEXT_INSN (insn);
3757 }
3758
3759 /* First do the bulk reordering -- rechain the blocks without regard to
3760 the needed changes to jumps and labels. */
3761
3762 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3763 bb->aux)
3764 {
3765 if (BB_HEADER (bb))
3766 {
3767 if (insn)
3768 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3769 else
3770 set_first_insn (BB_HEADER (bb));
3771 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3772 insn = BB_HEADER (bb);
3773 while (NEXT_INSN (insn))
3774 insn = NEXT_INSN (insn);
3775 }
3776 if (insn)
3777 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3778 else
3779 set_first_insn (BB_HEAD (bb));
3780 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3781 insn = BB_END (bb);
3782 if (BB_FOOTER (bb))
3783 {
3784 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3785 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3786 while (NEXT_INSN (insn))
3787 insn = NEXT_INSN (insn);
3788 }
3789 }
3790
3791 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3792 if (cfg_layout_function_footer)
3793 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3794
3795 while (NEXT_INSN (insn))
3796 insn = NEXT_INSN (insn);
3797
3798 set_last_insn (insn);
3799 if (flag_checking)
3800 verify_insn_chain ();
3801
3802 /* Now add jumps and labels as needed to match the blocks' new
3803 outgoing edges. */
3804
3805 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3806 bb->aux)
3807 {
3808 edge e_fall, e_taken, e;
3809 rtx_insn *bb_end_insn;
3810 rtx ret_label = NULL_RTX;
3811 basic_block nb;
3812 edge_iterator ei;
3813
3814 if (EDGE_COUNT (bb->succs) == 0)
3815 continue;
3816
3817 /* Find the old fallthru edge, and another non-EH edge for
3818 a taken jump. */
3819 e_taken = e_fall = NULL;
3820
3821 FOR_EACH_EDGE (e, ei, bb->succs)
3822 if (e->flags & EDGE_FALLTHRU)
3823 e_fall = e;
3824 else if (! (e->flags & EDGE_EH))
3825 e_taken = e;
3826
3827 bb_end_insn = BB_END (bb);
3828 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3829 {
3830 ret_label = JUMP_LABEL (bb_end_jump);
3831 if (any_condjump_p (bb_end_jump))
3832 {
3833 /* This might happen if the conditional jump has side
3834 effects and could therefore not be optimized away.
3835 Make the basic block end with a barrier in order
3836 to prevent rtl_verify_flow_info from complaining. */
3837 if (!e_fall)
3838 {
3839 gcc_assert (!onlyjump_p (bb_end_jump)
3840 || returnjump_p (bb_end_jump)
3841 || (e_taken->flags & EDGE_CROSSING));
3842 emit_barrier_after (bb_end_jump);
3843 continue;
3844 }
3845
3846 /* If the old fallthru is still next, nothing to do. */
3847 if (bb->aux == e_fall->dest
3848 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3849 continue;
3850
3851 /* The degenerate case of a conditional jump jumping to the next
3852 instruction can happen for jumps with side effects. We need
3853 to construct a forwarder block, and this will be done just
3854 fine by force_nonfallthru below. */
3855 if (!e_taken)
3856 ;
3857
3858 /* There is another special case: if *neither* block is next,
3859 such as happens at the very end of a function, then we'll
3860 need to add a new unconditional jump. Choose the taken
3861 edge based on known or assumed probability. */
3862 else if (bb->aux != e_taken->dest)
3863 {
3864 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3865
3866 if (note
3867 && profile_probability::from_reg_br_prob_note
3868 (XINT (note, 0)) < profile_probability::even ()
3869 && invert_jump (bb_end_jump,
3870 (e_fall->dest
3871 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3872 ? NULL_RTX
3873 : label_for_bb (e_fall->dest)), 0))
3874 {
3875 e_fall->flags &= ~EDGE_FALLTHRU;
3876 gcc_checking_assert (could_fall_through
3877 (e_taken->src, e_taken->dest));
3878 e_taken->flags |= EDGE_FALLTHRU;
3879 update_br_prob_note (bb);
3880 e = e_fall, e_fall = e_taken, e_taken = e;
3881 }
3882 }
3883
3884 /* If the "jumping" edge is a crossing edge, and the fall
3885 through edge is non-crossing, leave things as they are. */
3886 else if ((e_taken->flags & EDGE_CROSSING)
3887 && !(e_fall->flags & EDGE_CROSSING))
3888 continue;
3889
3890 /* Otherwise we can try to invert the jump. This will
3891 basically never fail; however, keep up the pretense. */
3892 else if (invert_jump (bb_end_jump,
3893 (e_fall->dest
3894 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3895 ? NULL_RTX
3896 : label_for_bb (e_fall->dest)), 0))
3897 {
3898 e_fall->flags &= ~EDGE_FALLTHRU;
3899 gcc_checking_assert (could_fall_through
3900 (e_taken->src, e_taken->dest));
3901 e_taken->flags |= EDGE_FALLTHRU;
3902 update_br_prob_note (bb);
3903 if (LABEL_NUSES (ret_label) == 0
3904 && single_pred_p (e_taken->dest))
3905 delete_insn (as_a<rtx_insn *> (ret_label));
3906 continue;
3907 }
3908 }
3909 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3910 {
3911 /* If the old fallthru is still next or if
3912 asm goto doesn't have a fallthru (e.g. when followed by
3913 __builtin_unreachable ()), nothing to do. */
3914 if (! e_fall
3915 || bb->aux == e_fall->dest
3916 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3917 continue;
3918
3919 /* Otherwise we'll have to use the fallthru fixup below. */
3920 }
3921 else
3922 {
3923 /* Otherwise we have some return, switch or computed
3924 jump. In the 99% case, there should not have been a
3925 fallthru edge. */
3926 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3927 continue;
3928 }
3929 }
3930 else
3931 {
3932 /* No fallthru implies a noreturn function with EH edges, or
3933 something similarly bizarre. In any case, we don't need to
3934 do anything. */
3935 if (! e_fall)
3936 continue;
3937
3938 /* If the fallthru block is still next, nothing to do. */
3939 if (bb->aux == e_fall->dest)
3940 continue;
3941
3942 /* A fallthru to exit block. */
3943 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3944 continue;
3945 }
3946
3947 /* We got here if we need to add a new jump insn.
3948 Note force_nonfallthru can delete E_FALL and thus we have to
3949 save E_FALL->src prior to the call to force_nonfallthru. */
3950 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3951 if (nb)
3952 {
3953 nb->aux = bb->aux;
3954 bb->aux = nb;
3955 /* Don't process this new block. */
3956 bb = nb;
3957 }
3958 }
3959
3960 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3961
3962 /* Annoying special case - jump around dead jumptables left in the code. */
3963 FOR_EACH_BB_FN (bb, cfun)
3964 {
3965 edge e = find_fallthru_edge (bb->succs);
3966
3967 if (e && !can_fallthru (e->src, e->dest))
3968 force_nonfallthru (e);
3969 }
3970
3971 /* When not optimizing, ensure that each edge's goto_locus is reflected
3972 by some instruction with that locus in the RTL. */
3973 if (!optimize && !DECL_IGNORED_P (current_function_decl))
3974 FOR_EACH_BB_FN (bb, cfun)
3975 {
3976 edge e;
3977 edge_iterator ei;
3978
3979 FOR_EACH_EDGE (e, ei, bb->succs)
3980 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3981 && !(e->flags & EDGE_ABNORMAL))
3982 {
3983 edge e2;
3984 edge_iterator ei2;
3985 basic_block dest, nb;
3986 rtx_insn *end;
3987
3988 insn = BB_END (e->src);
3989 end = PREV_INSN (BB_HEAD (e->src));
3990 while (insn != end
3991 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3992 insn = PREV_INSN (insn);
3993 if (insn != end
3994 && INSN_LOCATION (insn) == e->goto_locus)
3995 continue;
3996 if (simplejump_p (BB_END (e->src))
3997 && !INSN_HAS_LOCATION (BB_END (e->src)))
3998 {
3999 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4000 continue;
4001 }
4002 dest = e->dest;
4003 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4004 {
4005 /* Non-fallthru edges to the exit block cannot be split. */
4006 if (!(e->flags & EDGE_FALLTHRU))
4007 continue;
4008 }
4009 else
4010 {
4011 insn = BB_HEAD (dest);
4012 end = NEXT_INSN (BB_END (dest));
4013 while (insn != end && !NONDEBUG_INSN_P (insn))
4014 insn = NEXT_INSN (insn);
4015 if (insn != end && INSN_HAS_LOCATION (insn)
4016 && INSN_LOCATION (insn) == e->goto_locus)
4017 continue;
4018 }
4019 nb = split_edge (e);
4020 if (!INSN_P (BB_END (nb)))
4021 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4022 nb);
4023 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4024
4025 /* If there are other incoming edges to the destination block
4026 with the same goto locus, redirect them to the new block as
4027 well; this can prevent other such blocks from being created
4028 in subsequent iterations of the loop. */
4029 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4030 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4031 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4032 && e->goto_locus == e2->goto_locus)
4033 redirect_edge_and_branch (e2, nb);
4034 else
4035 ei_next (&ei2);
4036 }
4037 }
4038 }
4039 \f
4040 /* Perform sanity checks on the insn chain.
4041 1. Check that next/prev pointers are consistent in both the forward and
4042 reverse direction.
4043 2. Count insns in chain, going both directions, and check if equal.
4044 3. Check that get_last_insn () returns the actual end of chain. */
4045
4046 DEBUG_FUNCTION void
4047 verify_insn_chain (void)
4048 {
4049 rtx_insn *x, *prevx, *nextx;
4050 int insn_cnt1, insn_cnt2;
4051
4052 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4053 x != 0;
4054 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4055 gcc_assert (PREV_INSN (x) == prevx);
4056
4057 gcc_assert (prevx == get_last_insn ());
4058
4059 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4060 x != 0;
4061 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4062 gcc_assert (NEXT_INSN (x) == nextx);
4063
4064 gcc_assert (insn_cnt1 == insn_cnt2);
4065 }
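/* A minimal sketch, not part of GCC: the local form of the doubly linked
   insn-chain invariant that verify_insn_chain enforces.  */
#if 0
static void
example_check_local_link (rtx_insn *x)
{
  /* Forward and backward links must agree pairwise; verify_insn_chain
     additionally checks that the chain starts at get_insns () and ends
     at get_last_insn (), with equal counts in both directions.  */
  if (NEXT_INSN (x))
    gcc_assert (PREV_INSN (NEXT_INSN (x)) == x);
}
#endif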
4066 \f
4067 /* If we have assembler epilogues, the block falling through to exit must
4068 be the last one in the reordered chain when we reach final. Ensure
4069 that this condition is met. */
4070 static void
4071 fixup_fallthru_exit_predecessor (void)
4072 {
4073 edge e;
4074 basic_block bb = NULL;
4075
4076 /* This transformation is not valid before reload, because we might
4077 separate a call from the instruction that copies the return
4078 value. */
4079 gcc_assert (reload_completed);
4080
4081 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4082 if (e)
4083 bb = e->src;
4084
4085 if (bb && bb->aux)
4086 {
4087 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4088
4089 /* If the very first block is the one with the fall-through exit
4090 edge, we have to split that block. */
4091 if (c == bb)
4092 {
4093 bb = split_block_after_labels (bb)->dest;
4094 bb->aux = c->aux;
4095 c->aux = bb;
4096 BB_FOOTER (bb) = BB_FOOTER (c);
4097 BB_FOOTER (c) = NULL;
4098 }
4099
4100 while (c->aux != bb)
4101 c = (basic_block) c->aux;
4102
4103 c->aux = bb->aux;
4104 while (c->aux)
4105 c = (basic_block) c->aux;
4106
4107 c->aux = bb;
4108 bb->aux = NULL;
4109 }
4110 }
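/* Worked example, for illustration only: if the aux chain is
   ENTRY -> A -> B -> C and B is the fallthru predecessor of EXIT, the
   function above unlinks B and re-links it at the end of the chain,
   giving ENTRY -> A -> C -> B, so that B immediately precedes the exit
   block once the chain is committed.  */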
4111
4112 /* If there is more than one fallthru predecessor of the exit block,
4113 force there to be only one. */
4114
4115 static void
4116 force_one_exit_fallthru (void)
4117 {
4118 edge e, predecessor = NULL;
4119 bool more = false;
4120 edge_iterator ei;
4121 basic_block forwarder, bb;
4122
4123 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4124 if (e->flags & EDGE_FALLTHRU)
4125 {
4126 if (predecessor == NULL)
4127 predecessor = e;
4128 else
4129 {
4130 more = true;
4131 break;
4132 }
4133 }
4134
4135 if (!more)
4136 return;
4137
4138 /* Exit has several fallthru predecessors. Create a forwarder block for
4139 them. */
4140 forwarder = split_edge (predecessor);
4141 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4142 (e = ei_safe_edge (ei)); )
4143 {
4144 if (e->src == forwarder
4145 || !(e->flags & EDGE_FALLTHRU))
4146 ei_next (&ei);
4147 else
4148 redirect_edge_and_branch_force (e, forwarder);
4149 }
4150
4151 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4152 exit block. */
4153 FOR_EACH_BB_FN (bb, cfun)
4154 {
4155 if (bb->aux == NULL && bb != forwarder)
4156 {
4157 bb->aux = forwarder;
4158 break;
4159 }
4160 }
4161 }
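/* Worked example, for illustration only: if blocks B1 and B2 both fall
   through to EXIT, the function above splits one of the edges to create a
   forwarder block F and redirects the remaining fallthru edges to F, so
   that F becomes the single fallthru predecessor:

     B1 -> EXIT, B2 -> EXIT   becomes   B1 -> F, B2 -> F, F -> EXIT

   and F is then linked (via the aux chain) to precede the exit block.  */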
4162 \f
4163 /* Return true if it is possible to duplicate the basic block BB. */
4164
4165 static bool
4166 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4167 {
4168 /* Do not attempt to duplicate tablejumps, as we need to unshare
4169 the dispatch table. This is difficult to do, as the instructions
4170 computing the jump destination may be hoisted outside the basic block. */
4171 if (tablejump_p (BB_END (bb), NULL, NULL))
4172 return false;
4173
4174 /* Do not duplicate blocks containing insns that can't be copied. */
4175 if (targetm.cannot_copy_insn_p)
4176 {
4177 rtx_insn *insn = BB_HEAD (bb);
4178 while (1)
4179 {
4180 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4181 return false;
4182 if (insn == BB_END (bb))
4183 break;
4184 insn = NEXT_INSN (insn);
4185 }
4186 }
4187
4188 return true;
4189 }
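/* A hedged usage sketch, not GCC code: callers are expected to test this
   predicate through the generic cfghooks wrappers before duplicating.  */
#if 0
static basic_block
example_maybe_duplicate (basic_block bb, edge e)
{
  /* can_duplicate_block_p and duplicate_block dispatch through the
     cfg_hooks tables defined at the end of this file.  */
  if (!can_duplicate_block_p (bb))
    return NULL;
  return duplicate_block (bb, e, bb);
}
#endif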
4190
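/* Duplicate the insn chain between FROM and TO, inclusive, emitting the
   copies at the end of the current insn stream, and return the first insn
   of the copied chain.  */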
4191 rtx_insn *
4192 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4193 {
4194 rtx_insn *insn, *next, *copy;
4195 rtx_note *last;
4196
4197 /* Avoid updating the boundaries of the previous basic block. The
4198 note will be removed from the insn stream during fixup. */
4199 last = emit_note (NOTE_INSN_DELETED);
4200
4201 /* Create the copy at the end of the insn chain. The chain will
4202 be reordered later. */
4203 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4204 {
4205 switch (GET_CODE (insn))
4206 {
4207 case DEBUG_INSN:
4208 /* Don't duplicate label debug insns. */
4209 if (DEBUG_BIND_INSN_P (insn)
4210 && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4211 break;
4212 /* FALLTHRU */
4213 case INSN:
4214 case CALL_INSN:
4215 case JUMP_INSN:
4216 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4217 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4218 && ANY_RETURN_P (JUMP_LABEL (insn)))
4219 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4220 maybe_copy_prologue_epilogue_insn (insn, copy);
4221 break;
4222
4223 case JUMP_TABLE_DATA:
4224 /* Avoid copying dispatch tables. We never duplicate
4225 tablejumps, so this can only be hit if the table was
4226 moved far from the original jump.
4227 Also avoid copying the following barrier, if any
4228 (and any debug insns in between). */
4229 for (next = NEXT_INSN (insn);
4230 next != NEXT_INSN (to);
4231 next = NEXT_INSN (next))
4232 if (!DEBUG_INSN_P (next))
4233 break;
4234 if (next != NEXT_INSN (to) && BARRIER_P (next))
4235 insn = next;
4236 break;
4237
4238 case CODE_LABEL:
4239 break;
4240
4241 case BARRIER:
4242 emit_barrier ();
4243 break;
4244
4245 case NOTE:
4246 switch (NOTE_KIND (insn))
4247 {
4248 /* In case the prologue is empty and the function contains a label
4249 in its first BB, we may want to copy the block. */
4250 case NOTE_INSN_PROLOGUE_END:
4251
4252 case NOTE_INSN_DELETED:
4253 case NOTE_INSN_DELETED_LABEL:
4254 case NOTE_INSN_DELETED_DEBUG_LABEL:
4255 /* These are safe to strip. */
4256 case NOTE_INSN_FUNCTION_BEG:
4257 /* There is always just a single entry to the function. */
4258 case NOTE_INSN_BASIC_BLOCK:
4259 /* We should only switch text sections once. */
4260 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4261 break;
4262
4263 case NOTE_INSN_EPILOGUE_BEG:
4264 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4265 emit_note_copy (as_a <rtx_note *> (insn));
4266 break;
4267
4268 default:
4269 /* All other notes should have already been eliminated. */
4270 gcc_unreachable ();
4271 }
4272 break;
4273 default:
4274 gcc_unreachable ();
4275 }
4276 }
4277 insn = NEXT_INSN (last);
4278 delete_insn (last);
4279 return insn;
4280 }
4281
4282 /* Create a duplicate of the basic block BB. */
4283
4284 static basic_block
4285 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
4286 {
4287 rtx_insn *insn;
4288 basic_block new_bb;
4289
4290 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4291 new_bb = create_basic_block (insn,
4292 insn ? get_last_insn () : NULL,
4293 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4294
4295 BB_COPY_PARTITION (new_bb, bb);
4296 if (BB_HEADER (bb))
4297 {
4298 insn = BB_HEADER (bb);
4299 while (NEXT_INSN (insn))
4300 insn = NEXT_INSN (insn);
4301 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4302 if (insn)
4303 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4304 }
4305
4306 if (BB_FOOTER (bb))
4307 {
4308 insn = BB_FOOTER (bb);
4309 while (NEXT_INSN (insn))
4310 insn = NEXT_INSN (insn);
4311 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4312 if (insn)
4313 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4314 }
4315
4316 return new_bb;
4317 }
4318
4319 \f
4320 /* Main entry point to this module - initialize the data structures
4321 for CFG layout changes.
4322
4323 FLAGS is a set of additional flags to pass to cleanup_cfg (). */
4324
4325 void
4326 cfg_layout_initialize (int flags)
4327 {
4328 rtx_insn_list *x;
4329 basic_block bb;
4330
4331 /* Once bb partitioning is complete, cfg layout mode should not be
4332 re-entered. Entering cfg layout mode may require fixups; for
4333 example, edge forwarding performed while optimizing the cfg
4334 layout might require moving a block from the hot to the cold
4335 section, which would create an illegal partitioning unless some
4336 manual fixup was performed. */
4337 gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4338
4339 initialize_original_copy_tables ();
4340
4341 cfg_layout_rtl_register_cfg_hooks ();
4342
4343 record_effective_endpoints ();
4344
4345 /* Make sure that the targets of non-local gotos are marked. */
4346 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4347 {
4348 bb = BLOCK_FOR_INSN (x->insn ());
4349 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4350 }
4351
4352 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4353 }
4354
4355 /* Splits superblocks. */
4356 void
4357 break_superblocks (void)
4358 {
4359 bool need = false;
4360 basic_block bb;
4361
4362 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4363 bitmap_clear (superblocks);
4364
4365 FOR_EACH_BB_FN (bb, cfun)
4366 if (bb->flags & BB_SUPERBLOCK)
4367 {
4368 bb->flags &= ~BB_SUPERBLOCK;
4369 bitmap_set_bit (superblocks, bb->index);
4370 need = true;
4371 }
4372
4373 if (need)
4374 {
4375 rebuild_jump_labels (get_insns ());
4376 find_many_sub_basic_blocks (superblocks);
4377 }
4378 }
4379
4380 /* Finalize the changes: reorder the insn list according to the aux-pointer
4381 sequence, insert compensation code, and rebuild the scope forest. */
4382
4383 void
4384 cfg_layout_finalize (void)
4385 {
4386 free_dominance_info (CDI_DOMINATORS);
4387 force_one_exit_fallthru ();
4388 rtl_register_cfg_hooks ();
4389 if (reload_completed && !targetm.have_epilogue ())
4390 fixup_fallthru_exit_predecessor ();
4391 fixup_reorder_chain ();
4392
4393 rebuild_jump_labels (get_insns ());
4394 delete_dead_jumptables ();
4395
4396 if (flag_checking)
4397 verify_insn_chain ();
4398 checking_verify_flow_info ();
4399 }
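/* A minimal sketch, not actual GCC code: passes bracket CFG-reordering
   work with cfg_layout_initialize and cfg_layout_finalize.  The
   transformation in the middle (my_reorder_blocks) is a hypothetical
   placeholder.  */
#if 0
static void
example_cfglayout_pass (void)
{
  cfg_layout_initialize (0);
  /* While in cfglayout mode, fallthru edges need not connect adjacent
     blocks, so blocks can be reordered freely.  */
  my_reorder_blocks ();
  /* Commit the new block order back into the insn stream.  */
  cfg_layout_finalize ();
}
#endif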
4400
4401
4402 /* Same as split_block but update cfg_layout structures. */
4403
4404 static basic_block
4405 cfg_layout_split_block (basic_block bb, void *insnp)
4406 {
4407 rtx insn = (rtx) insnp;
4408 basic_block new_bb = rtl_split_block (bb, insn);
4409
4410 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4411 BB_FOOTER (bb) = NULL;
4412
4413 return new_bb;
4414 }
4415
4416 /* Redirect edge E to DEST. */
4417 static edge
4418 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4419 {
4420 basic_block src = e->src;
4421 edge ret;
4422
4423 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4424 return NULL;
4425
4426 if (e->dest == dest)
4427 return e;
4428
4429 if (e->flags & EDGE_CROSSING
4430 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4431 && simplejump_p (BB_END (src)))
4432 {
4433 if (dump_file)
4434 fprintf (dump_file,
4435 "Removing crossing jump while redirecting edge form %i to %i\n",
4436 e->src->index, dest->index);
4437 delete_insn (BB_END (src));
4438 remove_barriers_from_footer (src);
4439 e->flags |= EDGE_FALLTHRU;
4440 }
4441
4442 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4443 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4444 {
4445 df_set_bb_dirty (src);
4446 return ret;
4447 }
4448
4449 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4450 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4451 {
4452 if (dump_file)
4453 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4454 e->src->index, dest->index);
4455
4456 df_set_bb_dirty (e->src);
4457 redirect_edge_succ (e, dest);
4458 return e;
4459 }
4460
4461 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
4462 edge in case the basic blocks appear to be in sequence. Avoid this
4463 transformation. */
4464
4465 if (e->flags & EDGE_FALLTHRU)
4466 {
4467 /* Redirect any branch edges unified with the fallthru one. */
4468 if (JUMP_P (BB_END (src))
4469 && label_is_jump_target_p (BB_HEAD (e->dest),
4470 BB_END (src)))
4471 {
4472 edge redirected;
4473
4474 if (dump_file)
4475 fprintf (dump_file, "Fallthru edge unified with branch "
4476 "%i->%i redirected to %i\n",
4477 e->src->index, e->dest->index, dest->index);
4478 e->flags &= ~EDGE_FALLTHRU;
4479 redirected = redirect_branch_edge (e, dest);
4480 gcc_assert (redirected);
4481 redirected->flags |= EDGE_FALLTHRU;
4482 df_set_bb_dirty (redirected->src);
4483 return redirected;
4484 }
4485 /* If we are redirecting the fallthru edge to the branch edge
4486 of a conditional jump, remove the jump. */
4487 if (EDGE_COUNT (src->succs) == 2)
4488 {
4489 /* Find the edge that is different from E. */
4490 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4491
4492 if (s->dest == dest
4493 && any_condjump_p (BB_END (src))
4494 && onlyjump_p (BB_END (src)))
4495 delete_insn (BB_END (src));
4496 }
4497 if (dump_file)
4498 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4499 e->src->index, e->dest->index, dest->index);
4500 ret = redirect_edge_succ_nodup (e, dest);
4501 }
4502 else
4503 ret = redirect_branch_edge (e, dest);
4504
4505 if (!ret)
4506 return NULL;
4507
4508 fixup_partition_crossing (ret);
4509 /* We don't want simplejumps in the insn stream during cfglayout. */
4510 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4511
4512 df_set_bb_dirty (src);
4513 return ret;
4514 }
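/* A hedged usage sketch, not GCC code: like the other implementations of
   this hook, the function above returns the surviving edge on success and
   NULL on failure, so generic callers must check the result.  */
#if 0
static bool
example_try_redirect (edge e, basic_block dest)
{
  /* redirect_edge_and_branch is the generic cfghooks wrapper.  */
  return redirect_edge_and_branch (e, dest) != NULL;
}
#endif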
4515
4516 /* Simple wrapper, as we can always redirect fallthru edges. */
4517 static basic_block
4518 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4519 {
4520 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4521
4522 gcc_assert (redirected);
4523 return NULL;
4524 }
4525
4526 /* Same as delete_basic_block but update cfg_layout structures. */
4527
4528 static void
4529 cfg_layout_delete_block (basic_block bb)
4530 {
4531 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remains;
4532 rtx_insn **to;
4533
4534 if (BB_HEADER (bb))
4535 {
4536 next = BB_HEAD (bb);
4537 if (prev)
4538 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4539 else
4540 set_first_insn (BB_HEADER (bb));
4541 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4542 insn = BB_HEADER (bb);
4543 while (NEXT_INSN (insn))
4544 insn = NEXT_INSN (insn);
4545 SET_NEXT_INSN (insn) = next;
4546 SET_PREV_INSN (next) = insn;
4547 }
4548 next = NEXT_INSN (BB_END (bb));
4549 if (BB_FOOTER (bb))
4550 {
4551 insn = BB_FOOTER (bb);
4552 while (insn)
4553 {
4554 if (BARRIER_P (insn))
4555 {
4556 if (PREV_INSN (insn))
4557 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4558 else
4559 BB_FOOTER (bb) = NEXT_INSN (insn);
4560 if (NEXT_INSN (insn))
4561 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4562 }
4563 if (LABEL_P (insn))
4564 break;
4565 insn = NEXT_INSN (insn);
4566 }
4567 if (BB_FOOTER (bb))
4568 {
4569 insn = BB_END (bb);
4570 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4571 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4572 while (NEXT_INSN (insn))
4573 insn = NEXT_INSN (insn);
4574 SET_NEXT_INSN (insn) = next;
4575 if (next)
4576 SET_PREV_INSN (next) = insn;
4577 else
4578 set_last_insn (insn);
4579 }
4580 }
4581 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4582 to = &BB_HEADER (bb->next_bb);
4583 else
4584 to = &cfg_layout_function_footer;
4585
4586 rtl_delete_block (bb);
4587
4588 if (prev)
4589 prev = NEXT_INSN (prev);
4590 else
4591 prev = get_insns ();
4592 if (next)
4593 next = PREV_INSN (next);
4594 else
4595 next = get_last_insn ();
4596
4597 if (next && NEXT_INSN (next) != prev)
4598 {
4599 remains = unlink_insn_chain (prev, next);
4600 insn = remains;
4601 while (NEXT_INSN (insn))
4602 insn = NEXT_INSN (insn);
4603 SET_NEXT_INSN (insn) = *to;
4604 if (*to)
4605 SET_PREV_INSN (*to) = insn;
4606 *to = remains;
4607 }
4608 }
4609
4610 /* Return true when blocks A and B can be safely merged. */
4611
4612 static bool
4613 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4614 {
4615 /* If we are partitioning hot/cold basic blocks, we don't want to
4616 mess up unconditional or indirect jumps that cross between hot
4617 and cold sections.
4618
4619 Basic block partitioning may result in some jumps that appear to
4620 be optimizable (or blocks that appear to be mergeable), but which really
4621 must be left untouched (they are required to make it safely across
4622 partition boundaries). See the comments at the top of
4623 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4624
4625 if (BB_PARTITION (a) != BB_PARTITION (b))
4626 return false;
4627
4628 /* Protect the loop latches. */
4629 if (current_loops && b->loop_father->latch == b)
4630 return false;
4631
4632 /* If we would end up moving B's instructions, make sure it doesn't fall
4633 through into the exit block, since we cannot recover from a fallthrough
4634 edge into the exit block occurring in the middle of a function. */
4635 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4636 {
4637 edge e = find_fallthru_edge (b->succs);
4638 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4639 return false;
4640 }
4641
4642 /* There must be exactly one edge in between the blocks. */
4643 return (single_succ_p (a)
4644 && single_succ (a) == b
4645 && single_pred_p (b)
4646 && a != b
4647 /* Must be simple edge. */
4648 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4649 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4650 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4651 /* If the jump insn has side effects, we can't kill the edge.
4652 When not optimizing, try_redirect_by_replacing_jump will
4653 not allow us to redirect an edge by replacing a table jump. */
4654 && (!JUMP_P (BB_END (a))
4655 || ((!optimize || reload_completed)
4656 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4657 }
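/* Illustrative sketch, not part of GCC: the predicate above pairs with
   cfg_layout_merge_blocks below through the generic cfghooks wrappers,
   so the same caller works in both rtl and cfglayout modes.  */
#if 0
static void
example_maybe_merge (basic_block a, basic_block b)
{
  if (can_merge_blocks_p (a, b))
    merge_blocks (a, b);
}
#endif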
4658
4659 /* Merge block A and B. The blocks must be mergeable. */
4660
4661 static void
4662 cfg_layout_merge_blocks (basic_block a, basic_block b)
4663 {
4664 /* If B is a forwarder block whose outgoing edge has no location, we'll
4665 propagate the locus of the edge between A and B onto it. */
4666 const bool forward_edge_locus
4667 = (b->flags & BB_FORWARDER_BLOCK) != 0
4668 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4669 rtx_insn *insn;
4670
4671 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4672
4673 if (dump_file)
4674 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4675 a->index);
4676
4677 /* If there was a CODE_LABEL beginning B, delete it. */
4678 if (LABEL_P (BB_HEAD (b)))
4679 {
4680 delete_insn (BB_HEAD (b));
4681 }
4682
4683 /* We should have a fallthru edge in A, or we can do a dummy redirection
4684 to get it cleaned up. */
4685 if (JUMP_P (BB_END (a)))
4686 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4687 gcc_assert (!JUMP_P (BB_END (a)));
4688
4689 /* If not optimizing, preserve the locus of the single edge between
4690 blocks A and B if necessary by emitting a nop. */
4691 if (!optimize
4692 && !forward_edge_locus
4693 && !DECL_IGNORED_P (current_function_decl))
4694 emit_nop_for_unique_locus_between (a, b);
4695
4696 /* Move things from b->footer after a->footer. */
4697 if (BB_FOOTER (b))
4698 {
4699 if (!BB_FOOTER (a))
4700 BB_FOOTER (a) = BB_FOOTER (b);
4701 else
4702 {
4703 rtx_insn *last = BB_FOOTER (a);
4704
4705 while (NEXT_INSN (last))
4706 last = NEXT_INSN (last);
4707 SET_NEXT_INSN (last) = BB_FOOTER (b);
4708 SET_PREV_INSN (BB_FOOTER (b)) = last;
4709 }
4710 BB_FOOTER (b) = NULL;
4711 }
4712
4713 /* Move things from b->header before a->footer.
4714 Note that this may include dead tablejump data, but we don't clean
4715 those up until we go out of cfglayout mode. */
4716 if (BB_HEADER (b))
4717 {
4718 if (! BB_FOOTER (a))
4719 BB_FOOTER (a) = BB_HEADER (b);
4720 else
4721 {
4722 rtx_insn *last = BB_HEADER (b);
4723
4724 while (NEXT_INSN (last))
4725 last = NEXT_INSN (last);
4726 SET_NEXT_INSN (last) = BB_FOOTER (a);
4727 SET_PREV_INSN (BB_FOOTER (a)) = last;
4728 BB_FOOTER (a) = BB_HEADER (b);
4729 }
4730 BB_HEADER (b) = NULL;
4731 }
4732
4733 /* If the basic blocks are not adjacent, move the insns around. */
4734 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4735 {
4736 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4737
4738 emit_insn_after_noloc (insn, BB_END (a), a);
4739 }
4740 /* Otherwise just re-associate the instructions. */
4741 else
4742 {
4743 insn = BB_HEAD (b);
4744 BB_END (a) = BB_END (b);
4745 }
4746
4747 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4748 We need to call it explicitly. */
4749 update_bb_for_insn_chain (insn, BB_END (b), a);
4750
4751 /* Skip possible DELETED_LABEL insn. */
4752 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4753 insn = NEXT_INSN (insn);
4754 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4755 BB_HEAD (b) = BB_END (b) = NULL;
4756 delete_insn (insn);
4757
4758 df_bb_delete (b->index);
4759
4760 if (forward_edge_locus)
4761 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4762
4763 if (dump_file)
4764 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4765 }
4766
4767 /* Split edge E. */
4768
4769 static basic_block
4770 cfg_layout_split_edge (edge e)
4771 {
4772 basic_block new_bb =
4773 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4774 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4775 NULL_RTX, e->src);
4776
4777 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4778 BB_COPY_PARTITION (new_bb, e->src);
4779 else
4780 BB_COPY_PARTITION (new_bb, e->dest);
4781 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4782 redirect_edge_and_branch_force (e, new_bb);
4783
4784 return new_bb;
4785 }
4786
4787 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4788
4789 static void
4790 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4791 {
4792 }
4793
4794 /* Return true if BB contains only labels or non-executable
4795 instructions. */
4796
4797 static bool
4798 rtl_block_empty_p (basic_block bb)
4799 {
4800 rtx_insn *insn;
4801
4802 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4803 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4804 return true;
4805
4806 FOR_BB_INSNS (bb, insn)
4807 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4808 return false;
4809
4810 return true;
4811 }
4812
4813 /* Split a basic block if it ends with a conditional branch and if
4814 the other part of the block is not empty. */
4815
4816 static basic_block
4817 rtl_split_block_before_cond_jump (basic_block bb)
4818 {
4819 rtx_insn *insn;
4820 rtx_insn *split_point = NULL;
4821 rtx_insn *last = NULL;
4822 bool found_code = false;
4823
4824 FOR_BB_INSNS (bb, insn)
4825 {
4826 if (any_condjump_p (insn))
4827 split_point = last;
4828 else if (NONDEBUG_INSN_P (insn))
4829 found_code = true;
4830 last = insn;
4831 }
4832
4833 /* Split only if we found both a conditional jump and other real code. */
4834 if (found_code && split_point)
4835 return split_block (bb, split_point)->dest;
4836 else
4837 return NULL;
4838 }
4839
4840 /* Return true if BB ends with a call, possibly followed by some
4841 instructions that must stay with the call, false otherwise. */
4842
4843 static bool
4844 rtl_block_ends_with_call_p (basic_block bb)
4845 {
4846 rtx_insn *insn = BB_END (bb);
4847
4848 while (!CALL_P (insn)
4849 && insn != BB_HEAD (bb)
4850 && (keep_with_call_p (insn)
4851 || NOTE_P (insn)
4852 || DEBUG_INSN_P (insn)))
4853 insn = PREV_INSN (insn);
4854 return (CALL_P (insn));
4855 }
4856
4857 /* Return true if BB ends with a conditional branch, false otherwise. */
4858
4859 static bool
4860 rtl_block_ends_with_condjump_p (const_basic_block bb)
4861 {
4862 return any_condjump_p (BB_END (bb));
4863 }
4864
4865 /* Return true if we need to add a fake edge to the exit block.
4866 Helper function for rtl_flow_call_edges_add. */
4867
4868 static bool
4869 need_fake_edge_p (const rtx_insn *insn)
4870 {
4871 if (!INSN_P (insn))
4872 return false;
4873
4874 if ((CALL_P (insn)
4875 && !SIBLING_CALL_P (insn)
4876 && !find_reg_note (insn, REG_NORETURN, NULL)
4877 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4878 return true;
4879
4880 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4881 && MEM_VOLATILE_P (PATTERN (insn)))
4882 || (GET_CODE (PATTERN (insn)) == PARALLEL
4883 && asm_noperands (insn) != -1
4884 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4885 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4886 }
4887
4888 /* Add fake edges to the function exit for any non-constant and non-noreturn
4889 calls, and for volatile inline assembly, in the bitmap of blocks specified
4890 by BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number of
4891 blocks that were split.
4892
4893 The goal is to expose cases in which entering a basic block does not imply
4894 that all subsequent instructions must be executed. */
4895
4896 static int
4897 rtl_flow_call_edges_add (sbitmap blocks)
4898 {
4899 int i;
4900 int blocks_split = 0;
4901 int last_bb = last_basic_block_for_fn (cfun);
4902 bool check_last_block = false;
4903
4904 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
4905 return 0;
4906
4907 if (! blocks)
4908 check_last_block = true;
4909 else
4910 check_last_block = bitmap_bit_p (blocks,
4911 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
4912
4913 /* In the last basic block, before epilogue generation, there will be
4914 a fallthru edge to EXIT. Special care is required if the last insn
4915 of the last basic block is a call because make_edge folds duplicate
4916 edges, which would result in the fallthru edge also being marked
4917 fake, which would result in the fallthru edge being removed by
4918 remove_fake_edges, which would result in an invalid CFG.
4919
4920 Moreover, we can't elide the outgoing fake edge, since the block
4921 profiler needs to take this into account in order to solve the minimal
4922 spanning tree in the case that the call doesn't return.
4923
4924 Handle this by adding a dummy instruction in a new last basic block. */
4925 if (check_last_block)
4926 {
4927 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
4928 rtx_insn *insn = BB_END (bb);
4929
4930 /* Back up past insns that must be kept in the same block as a call. */
4931 while (insn != BB_HEAD (bb)
4932 && keep_with_call_p (insn))
4933 insn = PREV_INSN (insn);
4934
4935 if (need_fake_edge_p (insn))
4936 {
4937 edge e;
4938
4939 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4940 if (e)
4941 {
4942 insert_insn_on_edge (gen_use (const0_rtx), e);
4943 commit_edge_insertions ();
4944 }
4945 }
4946 }
4947
4948 /* Now add fake edges to the function exit for any non-constant
4949 calls, since there is no way to determine whether they will
4950 return or not. */
4951
4952 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4953 {
4954 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
4955 rtx_insn *insn;
4956 rtx_insn *prev_insn;
4957
4958 if (!bb)
4959 continue;
4960
4961 if (blocks && !bitmap_bit_p (blocks, i))
4962 continue;
4963
4964 for (insn = BB_END (bb); ; insn = prev_insn)
4965 {
4966 prev_insn = PREV_INSN (insn);
4967 if (need_fake_edge_p (insn))
4968 {
4969 edge e;
4970 rtx_insn *split_at_insn = insn;
4971
4972 /* Don't split the block between a call and an insn that should
4973 remain in the same block as the call. */
4974 if (CALL_P (insn))
4975 while (split_at_insn != BB_END (bb)
4976 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4977 split_at_insn = NEXT_INSN (split_at_insn);
4978
4979 /* The handling above of the final block before the epilogue
4980 should be enough to verify that there is no edge to the exit
4981 block in CFG already. Calling make_edge in such case would
4982 cause us to mark that edge as fake and remove it later. */
4983
4984 if (flag_checking && split_at_insn == BB_END (bb))
4985 {
4986 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4987 gcc_assert (e == NULL);
4988 }
4989
4990 /* Note that the following may create a new basic block
4991 and renumber the existing basic blocks. */
4992 if (split_at_insn != BB_END (bb))
4993 {
4994 e = split_block (bb, split_at_insn);
4995 if (e)
4996 blocks_split++;
4997 }
4998
4999 edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
5000 ne->probability = profile_probability::guessed_never ();
5001 }
5002
5003 if (insn == BB_HEAD (bb))
5004 break;
5005 }
5006 }
5007
5008 if (blocks_split)
5009 verify_flow_info ();
5010
5011 return blocks_split;
5012 }
5013
5014 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
5015 the conditional branch target. SECOND_HEAD is the fall-thru target and
5016 needs no handling here, since the loop versioning code takes care of it;
5017 it is part of the signature only because the tree-level implementation
5018 of this hook needs it, and both implementations must share one type. */
5019 static void
5020 rtl_lv_add_condition_to_bb (basic_block first_head ,
5021 basic_block second_head ATTRIBUTE_UNUSED,
5022 basic_block cond_bb, void *comp_rtx)
5023 {
5024 rtx_code_label *label;
5025 rtx_insn *seq, *jump;
5026 rtx op0 = XEXP ((rtx)comp_rtx, 0);
5027 rtx op1 = XEXP ((rtx)comp_rtx, 1);
5028 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
5029 machine_mode mode;
5030
5031
5032 label = block_label (first_head);
5033 mode = GET_MODE (op0);
5034 if (mode == VOIDmode)
5035 mode = GET_MODE (op1);
5036
5037 start_sequence ();
5038 op0 = force_operand (op0, NULL_RTX);
5039 op1 = force_operand (op1, NULL_RTX);
5040 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5041 profile_probability::uninitialized ());
5042 jump = get_last_insn ();
5043 JUMP_LABEL (jump) = label;
5044 LABEL_NUSES (label)++;
5045 seq = get_insns ();
5046 end_sequence ();
5047
5048 /* Emit the new condition insns at the end of COND_BB. */
5049 emit_insn_after (seq, BB_END (cond_bb));
5050 }
5051
5052
5053 /* Given a block B with a conditional branch at its end, store the
5054 branch edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
5055 respectively. */
5056 static void
5057 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5058 edge *fallthru_edge)
5059 {
5060 edge e = EDGE_SUCC (b, 0);
5061
5062 if (e->flags & EDGE_FALLTHRU)
5063 {
5064 *fallthru_edge = e;
5065 *branch_edge = EDGE_SUCC (b, 1);
5066 }
5067 else
5068 {
5069 *branch_edge = e;
5070 *fallthru_edge = EDGE_SUCC (b, 1);
5071 }
5072 }
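/* A small usage sketch, not GCC code: callers that need both outgoing
   edges of a conditional block go through the generic hook.  */
#if 0
static void
example_use_cond_edges (basic_block cond_bb)
{
  edge branch_edge, fallthru_edge;
  extract_cond_bb_edges (cond_bb, &branch_edge, &fallthru_edge);
  /* branch_edge is the taken edge; fallthru_edge leads to the block
     that follows textually.  */
}
#endif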
5073
5074 void
5075 init_rtl_bb_info (basic_block bb)
5076 {
5077 gcc_assert (!bb->il.x.rtl);
5078 bb->il.x.head_ = NULL;
5079 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5080 }
5081
5082 /* Returns true if it is possible to remove edge E by redirecting
5083 it to the destination of the other edge from E->src. */
5084
5085 static bool
5086 rtl_can_remove_branch_p (const_edge e)
5087 {
5088 const_basic_block src = e->src;
5089 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5090 const rtx_insn *insn = BB_END (src);
5091 rtx set;
5092
5093 /* The conditions are taken from try_redirect_by_replacing_jump. */
5094 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5095 return false;
5096
5097 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5098 return false;
5099
5100 if (BB_PARTITION (src) != BB_PARTITION (target))
5101 return false;
5102
5103 if (!onlyjump_p (insn)
5104 || tablejump_p (insn, NULL, NULL))
5105 return false;
5106
5107 set = single_set (insn);
5108 if (!set || side_effects_p (set))
5109 return false;
5110
5111 return true;
5112 }
5113
5114 static basic_block
5115 rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
5116 {
5117 bb = cfg_layout_duplicate_bb (bb, id);
5118 bb->aux = NULL;
5119 return bb;
5120 }
5121
5122 /* Do book-keeping of basic block BB for the profile consistency checker.
5123 Store the counts in RECORD. */
5124 static void
5125 rtl_account_profile_record (basic_block bb, struct profile_record *record)
5126 {
5127 rtx_insn *insn;
5128 FOR_BB_INSNS (bb, insn)
5129 if (INSN_P (insn))
5130 {
5131 record->size += insn_cost (insn, false);
5132 if (bb->count.initialized_p ())
5133 record->time
5134 += insn_cost (insn, true) * bb->count.to_gcov_type ();
5135 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5136 record->time
5137 += insn_cost (insn, true) * bb->count.to_frequency (cfun);
5138 }
5139 }
5140
5141 /* Implementation of CFG manipulation for linearized RTL. */
5142 struct cfg_hooks rtl_cfg_hooks = {
5143 "rtl",
5144 rtl_verify_flow_info,
5145 rtl_dump_bb,
5146 rtl_dump_bb_for_graph,
5147 rtl_create_basic_block,
5148 rtl_redirect_edge_and_branch,
5149 rtl_redirect_edge_and_branch_force,
5150 rtl_can_remove_branch_p,
5151 rtl_delete_block,
5152 rtl_split_block,
5153 rtl_move_block_after,
5154 rtl_can_merge_blocks, /* can_merge_blocks_p */
5155 rtl_merge_blocks,
5156 rtl_predict_edge,
5157 rtl_predicted_by_p,
5158 cfg_layout_can_duplicate_bb_p,
5159 rtl_duplicate_bb,
5160 rtl_split_edge,
5161 rtl_make_forwarder_block,
5162 rtl_tidy_fallthru_edge,
5163 rtl_force_nonfallthru,
5164 rtl_block_ends_with_call_p,
5165 rtl_block_ends_with_condjump_p,
5166 rtl_flow_call_edges_add,
5167 NULL, /* execute_on_growing_pred */
5168 NULL, /* execute_on_shrinking_pred */
5169 NULL, /* duplicate loop for trees */
5170 NULL, /* lv_add_condition_to_bb */
5171 NULL, /* lv_adjust_loop_header_phi*/
5172 NULL, /* extract_cond_bb_edges */
5173 NULL, /* flush_pending_stmts */
5174 rtl_block_empty_p, /* block_empty_p */
5175 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5176 rtl_account_profile_record,
5177 };
5178
5179 /* Implementation of CFG manipulation for cfg layout RTL, where
5180 basic blocks connected via fallthru edges do not have to be adjacent.
5181 This representation will hopefully become the default one in a future
5182 version of the compiler. */
5183
5184 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5185 "cfglayout mode",
5186 rtl_verify_flow_info_1,
5187 rtl_dump_bb,
5188 rtl_dump_bb_for_graph,
5189 cfg_layout_create_basic_block,
5190 cfg_layout_redirect_edge_and_branch,
5191 cfg_layout_redirect_edge_and_branch_force,
5192 rtl_can_remove_branch_p,
5193 cfg_layout_delete_block,
5194 cfg_layout_split_block,
5195 rtl_move_block_after,
5196 cfg_layout_can_merge_blocks_p,
5197 cfg_layout_merge_blocks,
5198 rtl_predict_edge,
5199 rtl_predicted_by_p,
5200 cfg_layout_can_duplicate_bb_p,
5201 cfg_layout_duplicate_bb,
5202 cfg_layout_split_edge,
5203 rtl_make_forwarder_block,
5204 NULL, /* tidy_fallthru_edge */
5205 rtl_force_nonfallthru,
5206 rtl_block_ends_with_call_p,
5207 rtl_block_ends_with_condjump_p,
5208 rtl_flow_call_edges_add,
5209 NULL, /* execute_on_growing_pred */
5210 NULL, /* execute_on_shrinking_pred */
5211 duplicate_loop_to_header_edge, /* duplicate loop for trees */
5212 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5213 NULL, /* lv_adjust_loop_header_phi*/
5214 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5215 NULL, /* flush_pending_stmts */
5216 rtl_block_empty_p, /* block_empty_p */
5217 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5218 rtl_account_profile_record,
5219 };
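/* Illustrative sketch, not part of GCC: the two hook tables above are
   installed with rtl_register_cfg_hooks and
   cfg_layout_rtl_register_cfg_hooks, after which generic routines such as
   split_edge or merge_blocks dispatch to the implementation matching the
   current mode.  */
#if 0
static void
example_switch_modes (void)
{
  cfg_layout_rtl_register_cfg_hooks ();  /* cfglayout-mode behavior.  */
  /* ... CFG manipulation ...  */
  rtl_register_cfg_hooks ();  /* back to linearized RTL.  */
}
#endif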
5220
5221 #include "gt-cfgrtl.h"
5222
5223 #if __GNUC__ >= 10
5224 # pragma GCC diagnostic pop
5225 #endif