/* gcc/cfglayout.c -- extracted from the thirdparty/gcc.git repository
   web view; HTML navigation residue removed.  */
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "function.h"
32 #include "cfglayout.h"
33 #include "cfgloop.h"
34 #include "target.h"
35 #include "common/common-target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
42 #include "emit-rtl.h"
43
/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer;
/* Holds the interesting leading notes that appear before the first
   basic block of the function.  */
rtx cfg_layout_function_header;

/* Forward declarations for the local helpers defined below.  */
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
57 \f
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
60 {
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
63
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
75 }
76 \f
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
80
81 static rtx
82 skip_insns_after_block (basic_block bb)
83 {
84 rtx insn, last_insn, next_head, prev;
85
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
89
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
91 {
92 if (insn == next_head)
93 break;
94
95 switch (GET_CODE (insn))
96 {
97 case BARRIER:
98 last_insn = insn;
99 continue;
100
101 case NOTE:
102 switch (NOTE_KIND (insn))
103 {
104 case NOTE_INSN_BLOCK_END:
105 gcc_unreachable ();
106 continue;
107 default:
108 continue;
109 break;
110 }
111 break;
112
113 case CODE_LABEL:
114 if (NEXT_INSN (insn)
115 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
116 {
117 insn = NEXT_INSN (insn);
118 last_insn = insn;
119 continue;
120 }
121 break;
122
123 default:
124 break;
125 }
126
127 break;
128 }
129
130 /* It is possible to hit contradictory sequence. For instance:
131
132 jump_insn
133 NOTE_INSN_BLOCK_BEG
134 barrier
135
136 Where barrier belongs to jump_insn, but the note does not. This can be
137 created by removing the basic block originally following
138 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
139
140 for (insn = last_insn; insn != BB_END (bb); insn = prev)
141 {
142 prev = PREV_INSN (insn);
143 if (NOTE_P (insn))
144 switch (NOTE_KIND (insn))
145 {
146 case NOTE_INSN_BLOCK_END:
147 gcc_unreachable ();
148 break;
149 case NOTE_INSN_DELETED:
150 case NOTE_INSN_DELETED_LABEL:
151 case NOTE_INSN_DELETED_DEBUG_LABEL:
152 continue;
153 default:
154 reorder_insns (insn, insn, last_insn);
155 }
156 }
157
158 return last_insn;
159 }
160
161 /* Locate or create a label for a given basic block. */
162
163 static rtx
164 label_for_bb (basic_block bb)
165 {
166 rtx label = BB_HEAD (bb);
167
168 if (!LABEL_P (label))
169 {
170 if (dump_file)
171 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
172
173 label = block_label (bb);
174 }
175
176 return label;
177 }
178
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Insns outside the
   blocks proper are detached into the per-block header/footer fields
   and the global cfg_layout_function_header/footer chains.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip leading notes to find the first basic-block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything before the first block becomes the function header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's end and this block's head
	 become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      /* Trailing barriers and the like become the block's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever remains after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
222 \f
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improve the memory locality in binary search.
   The _locs array contains locators where the given property change.  The
   block_locators_blocks contains the scope block that is used for all insn
   locator greater than corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
/* Locators assigned to the prologue and epilogue insns; 0 until set.  */
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location, last_location;
static tree curr_block, last_block;
/* Most recently allocated locator; -1 while the machinery is inactive.  */
static int curr_rtl_loc = -1;
245
246 /* Allocate insn locator datastructure. */
247 void
248 insn_locators_alloc (void)
249 {
250 prologue_locator = epilogue_locator = 0;
251
252 block_locators_locs = VEC_alloc (int, heap, 32);
253 block_locators_blocks = VEC_alloc (tree, gc, 32);
254 locations_locators_locs = VEC_alloc (int, heap, 32);
255 locations_locators_vals = VEC_alloc (location_t, heap, 32);
256
257 curr_location = UNKNOWN_LOCATION;
258 last_location = UNKNOWN_LOCATION;
259 curr_block = NULL;
260 last_block = NULL;
261 curr_rtl_loc = 0;
262 }
263
264 /* At the end of emit stage, clear current location. */
265 void
266 insn_locators_finalize (void)
267 {
268 if (curr_rtl_loc >= 0)
269 epilogue_locator = curr_insn_locator ();
270 curr_rtl_loc = -1;
271 }
272
/* Free the insn locator datastructures.  (Fixed comment: this frees,
   the old header was a copy-paste of the allocator's.)  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree,gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}
284
285
286 /* Set current location. */
287 void
288 set_curr_insn_source_location (location_t location)
289 {
290 /* IV opts calls into RTL expansion to compute costs of operations. At this
291 time locators are not initialized. */
292 if (curr_rtl_loc == -1)
293 return;
294 curr_location = location;
295 }
296
297 /* Get current location. */
298 location_t
299 get_curr_insn_source_location (void)
300 {
301 return curr_location;
302 }
303
304 /* Set current scope block. */
305 void
306 set_curr_insn_block (tree b)
307 {
308 /* IV opts calls into RTL expansion to compute costs of operations. At this
309 time locators are not initialized. */
310 if (curr_rtl_loc == -1)
311 return;
312 if (b)
313 curr_block = b;
314 }
315
316 /* Get current scope block. */
317 tree
318 get_curr_insn_block (void)
319 {
320 return curr_block;
321 }
322
/* Return current insn locator, lazily allocating a new one whenever the
   current block or location differs from the last recorded pair.
   Returns 0 when the machinery is inactive or no location is known.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1 || curr_location == UNKNOWN_LOCATION)
    return 0;
  /* Record a new block boundary if the scope block changed.  */
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  /* Likewise for a change of source location.  */
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
345
/* Pass callback: switch the current function into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
352
353 static unsigned int
354 outof_cfg_layout_mode (void)
355 {
356 basic_block bb;
357
358 FOR_EACH_BB (bb)
359 if (bb->next_bb != EXIT_BLOCK_PTR)
360 bb->aux = bb->next_bb;
361
362 cfg_layout_finalize ();
363
364 return 0;
365 }
366
/* Pass descriptor for switching into cfglayout mode.  */
struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
385
/* Pass descriptor for leaving cfglayout mode.  */
struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
404 \f
405 /* Return scope resulting from combination of S1 and S2. */
406 static tree
407 choose_inner_scope (tree s1, tree s2)
408 {
409 if (!s1)
410 return s2;
411 if (!s2)
412 return s1;
413 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
414 return s1;
415 return s2;
416 }
417 \f
/* Emit lexical block notes needed to change scope from S1 to S2.  All
   notes are emitted immediately before ORIG_INSN: BLOCK_END notes for
   every scope on the path from S1 up to (but excluding) the common
   ancestor, then BLOCK_BEG notes down to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Find the common ancestor by walking both scopes up the block tree,
     always ascending from whichever currently has the larger (deeper)
     BLOCK_NUMBER.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Emitting before INSN and then retargeting INSN at the
     new note makes the outer BLOCK_BEG notes end up first.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
461
/* Return lexical scope block locator LOC belongs to, or NULL when the
   locator is null or no blocks were recorded.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Binary search for the largest recorded locator <= LOC; MIN settles
     on its index when neither bound can move any further.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
500
501 /* Return lexical scope block insn belongs to. */
502 tree
503 insn_scope (const_rtx insn)
504 {
505 return locator_scope (INSN_LOCATOR (insn));
506 }
507
/* Return the source location of the statement specified by locator LOC.
   (Fixed comment: this returns a location_t, not a line number.)  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  /* Binary search for the largest recorded locator <= LOC, exactly as
     in locator_scope above.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
532
533 /* Return source line of the statement that produced this insn. */
534 int
535 locator_line (int loc)
536 {
537 expanded_location xloc;
538 if (!loc)
539 return 0;
540 else
541 xloc = expand_location (locator_location (loc));
542 return xloc.line;
543 }
544
545 /* Return line number of the statement that produced this insn. */
546 int
547 insn_line (const_rtx insn)
548 {
549 return locator_line (INSN_LOCATOR (insn));
550 }
551
552 /* Return source file of the statement specified by LOC. */
553 const char *
554 locator_file (int loc)
555 {
556 expanded_location xloc;
557 if (!loc)
558 return 0;
559 else
560 xloc = expand_location (locator_location (loc));
561 return xloc.file;
562 }
563
564 /* Return source file of the statement that produced this insn. */
565 const char *
566 insn_file (const_rtx insn)
567 {
568 return locator_file (INSN_LOCATOR (insn));
569 }
570
571 /* Return true if LOC1 and LOC2 locators have the same location and scope. */
572 bool
573 locator_eq (int loc1, int loc2)
574 {
575 if (loc1 == loc2)
576 return true;
577 if (locator_location (loc1) != locator_location (loc2))
578 return false;
579 return locator_scope (loc1) == locator_scope (loc2);
580 }
581
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      /* Insns without a known scope keep the currently open scope.  */
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Emit a throwaway
     note to close any scopes still open at the end of the function.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
632 \f
633
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite the prev_bb/next_bb pointers to
     follow the aux-chain order.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
700 \f
701
/* Given a reorder chain (linked through bb->aux), rearrange the insn
   stream to match, then patch up jumps and labels so every block's
   outgoing edges are realizable in the new order.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* Start the chain with the saved function header, if any; INSN tracks
     the tail of the chain built so far.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice in this block's header, body, and footer in turn.  */
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  /* Terminate the chain with the saved function footer.  */
  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-thru across
	     section boundaries).  */
	  BB_COPY_PARTITION (src_bb, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm_common.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* If the source block already ends in an insn with this
		 locus, nothing to do.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || INSN_LOCATOR (insn) == 0))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		continue;
	      /* A locator-less simple jump can simply adopt the locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && INSN_LOCATOR (BB_END (e->src)) == 0)
		{
		  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Likewise, a destination starting with this locus
		     needs no fixup.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_LOCATOR (insn)
		      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		    continue;
		}
	      /* Otherwise split the edge and give the new block (at
		 least a nop insn carrying) the goto locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (e2->goto_locus
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && locator_eq (e->goto_locus, e2->goto_locus))
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
1004 \f
1005 /* Perform sanity checks on the insn chain.
1006 1. Check that next/prev pointers are consistent in both the forward and
1007 reverse direction.
1008 2. Count insns in chain, going both directions, and check if equal.
1009 3. Check that get_last_insn () returns the actual end of chain. */
1010
1011 DEBUG_FUNCTION void
1012 verify_insn_chain (void)
1013 {
1014 rtx x, prevx, nextx;
1015 int insn_cnt1, insn_cnt2;
1016
1017 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
1018 x != 0;
1019 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
1020 gcc_assert (PREV_INSN (x) == prevx);
1021
1022 gcc_assert (prevx == get_last_insn ());
1023
1024 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
1025 x != 0;
1026 nextx = x, insn_cnt2++, x = PREV_INSN (x))
1027 gcc_assert (NEXT_INSN (x) == nextx);
1028
1029 gcc_assert (insn_cnt1 == insn_cnt2);
1030 }
1031 \f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  /* Only act when BB exists and is not already last in the aux chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... and re-append it at the end.  */
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
1076
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Look for a second fallthru predecessor of the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  Any block with a NULL aux field was the old chain tail.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
1126 \f
1127 /* Return true in case it is possible to duplicate the basic block BB. */
1128
1129 /* We do not want to declare the function in a header file, since it should
1130 only be used through the cfghooks interface, and we do not want to move
1131 it to cfgrtl.c since it would require also moving quite a lot of related
1132 code. */
1133 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1134
1135 bool
1136 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1137 {
1138 /* Do not attempt to duplicate tablejumps, as we need to unshare
1139 the dispatch table. This is difficult to do, as the instructions
1140 computing jump destination may be hoisted outside the basic block. */
1141 if (tablejump_p (BB_END (bb), NULL, NULL))
1142 return false;
1143
1144 /* Do not duplicate blocks containing insns that can't be copied. */
1145 if (targetm.cannot_copy_insn_p)
1146 {
1147 rtx insn = BB_HEAD (bb);
1148 while (1)
1149 {
1150 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1151 return false;
1152 if (insn == BB_END (bb))
1153 break;
1154 insn = NEXT_INSN (insn);
1155 }
1156 }
1157
1158 return true;
1159 }
1160
/* Emit a copy of the insns FROM through TO (inclusive) at the end of the
   current insn stream, and return the first insn of the copy.  Labels,
   dispatch tables, and most notes are deliberately not copied; the emitted
   chain is expected to be re-linked into place by the caller.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying following barrier as well if any
		 (and debug insns in between).  */
	      rtx next;

	      /* Scan past any debug insns to the first real insn after
		 the table; if that is a barrier, skip over it too.  */
	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Preserve (simple) return jump labels on the copy, since
	     emit_copy_of_insn_after does not carry them over here.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  /* Labels are never duplicated — the copy would clash with
	     the original.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      /* These notes carry meaning for later passes — copy them.  */
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The placeholder note served only to anchor the copy; the insn after
     it is the first copied insn.  Remove the note and return that insn.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
1251 /* Create a duplicate of the basic block BB. */
1252
1253 /* We do not want to declare the function in a header file, since it should
1254 only be used through the cfghooks interface, and we do not want to move
1255 it to cfgrtl.c since it would require also moving quite a lot of related
1256 code. */
1257 extern basic_block cfg_layout_duplicate_bb (basic_block);
1258
1259 basic_block
1260 cfg_layout_duplicate_bb (basic_block bb)
1261 {
1262 rtx insn;
1263 basic_block new_bb;
1264
1265 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1266 new_bb = create_basic_block (insn,
1267 insn ? get_last_insn () : NULL,
1268 EXIT_BLOCK_PTR->prev_bb);
1269
1270 BB_COPY_PARTITION (new_bb, bb);
1271 if (BB_HEADER (bb))
1272 {
1273 insn = BB_HEADER (bb);
1274 while (NEXT_INSN (insn))
1275 insn = NEXT_INSN (insn);
1276 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
1277 if (insn)
1278 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
1279 }
1280
1281 if (BB_FOOTER (bb))
1282 {
1283 insn = BB_FOOTER (bb);
1284 while (NEXT_INSN (insn))
1285 insn = NEXT_INSN (insn);
1286 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
1287 if (insn)
1288 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
1289 }
1290
1291 return new_bb;
1292 }
1293
1294 \f
1295 /* Main entry point to this module - initialize the datastructures for
1296 CFG layout changes. It keeps LOOPS up-to-date if not null.
1297
1298 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1299
1300 void
1301 cfg_layout_initialize (unsigned int flags)
1302 {
1303 rtx x;
1304 basic_block bb;
1305
1306 initialize_original_copy_tables ();
1307
1308 cfg_layout_rtl_register_cfg_hooks ();
1309
1310 record_effective_endpoints ();
1311
1312 /* Make sure that the targets of non local gotos are marked. */
1313 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1314 {
1315 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1316 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1317 }
1318
1319 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1320 }
1321
1322 /* Splits superblocks. */
1323 void
1324 break_superblocks (void)
1325 {
1326 sbitmap superblocks;
1327 bool need = false;
1328 basic_block bb;
1329
1330 superblocks = sbitmap_alloc (last_basic_block);
1331 sbitmap_zero (superblocks);
1332
1333 FOR_EACH_BB (bb)
1334 if (bb->flags & BB_SUPERBLOCK)
1335 {
1336 bb->flags &= ~BB_SUPERBLOCK;
1337 SET_BIT (superblocks, bb->index);
1338 need = true;
1339 }
1340
1341 if (need)
1342 {
1343 rebuild_jump_labels (get_insns ());
1344 find_many_sub_basic_blocks (superblocks);
1345 }
1346
1347 free (superblocks);
1348 }
1349
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Ensure the exit block has at most one fallthru predecessor, creating
     a forwarder block if necessary.  */
  force_one_exit_fallthru ();
  /* Switch from the cfglayout hooks back to the plain RTL cfg hooks.  */
  rtl_register_cfg_hooks ();
  /* NOTE(review): presumably after reload, when no epilogue will be
     emitted, a fallthru predecessor of the exit block must end up last
     in the chain — confirm against fixup_fallthru_exit_predecessor.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Materialize the block order recorded in the bb->aux chain.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
1377
1378 /* Checks whether all N blocks in BBS array can be copied. */
1379 bool
1380 can_copy_bbs_p (basic_block *bbs, unsigned n)
1381 {
1382 unsigned i;
1383 edge e;
1384 int ret = true;
1385
1386 for (i = 0; i < n; i++)
1387 bbs[i]->flags |= BB_DUPLICATED;
1388
1389 for (i = 0; i < n; i++)
1390 {
1391 /* In case we should redirect abnormal edge during duplication, fail. */
1392 edge_iterator ei;
1393 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1394 if ((e->flags & EDGE_ABNORMAL)
1395 && (e->dest->flags & BB_DUPLICATED))
1396 {
1397 ret = false;
1398 goto end;
1399 }
1400
1401 if (!can_duplicate_block_p (bbs[i]))
1402 {
1403 ret = false;
1404 break;
1405 }
1406 }
1407
1408 end:
1409 for (i = 0; i < n; i++)
1410 bbs[i]->flags &= ~BB_DUPLICATED;
1411
1412 return ret;
1413 }
1414
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those of them
   that lead into BBS are redirected to appropriate newly created block.  The
   function assigns bbs into loops (copy of basic block bb is assigned to
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (LOOPS structure that contains the information
   about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in BBS array is preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      /* BB_DUPLICATED marks the region so edges/dominators inside it can
	 be recognized in the later passes; cleared again at the end.  */
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  Only done when the immediate dominator lies inside
     the copied region; otherwise the copy's dominator is left for the
     caller to establish.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  /* Record the copy of each requested edge in NEW_EDGES.  */
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  /* Edges leaving the region keep their original destination;
	     edges into the region are retargeted to the copies.  */
	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
1497
1498 #include "gt-cfglayout.h"