/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
	 find_basic_blocks  */
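
/* As an illustrative sketch (not from the sources, with made-up insn
   numbers), an insn stream such as

	insn 1		<- first block starts
	insn 2
	jump_insn 3	<- control flow insn ends the first block
	code_label 4	<- second block starts
	insn 5
	insn 6		<- last insn ends the second block

   is cut at control flow insns and code labels; the edge structures
   then record that the first block can branch to the second (and, for
   a conditional jump, fall through to it).  */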
\f
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"

static int count_basic_blocks (rtx);
static void find_basic_blocks_1 (rtx);
static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);
\f
/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || !JUMP_P (NEXT_INSN (insn))
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}
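
/* For illustration (a hedged sketch, not from the sources): a
   tablejump is emitted as a CODE_LABEL immediately followed by a
   JUMP_INSN whose pattern is an ADDR_VEC or ADDR_DIFF_VEC,

	code_label 10
	jump_insn 11 (addr_vec [(label_ref 20) (label_ref 21) ...])

   and inside_basic_block_p returns false for both insns, so the
   dispatch table itself never becomes a basic block.  */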

/* Return true if INSN may cause a control flow transfer, so it should
   be last in the basic block.  */

bool
control_flow_insn_p (rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
	 tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate basic blocks
	 (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
	   || find_reg_note (insn, REG_NORETURN, 0))
	  && GET_CODE (PATTERN (insn)) != COND_EXEC)
	return true;
      /* A call insn may return to a nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      /* Or may trap.  */
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsensical to reach a barrier when looking for the end
	 of a basic block, but this may happen before dead code is
	 eliminated.  */
      return false;

    default:
      gcc_unreachable ();
    }
}
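
/* A hedged example (insn numbers invented): for a call insn carrying
   an EH region note,

	(call_insn 7 ... (expr_list:REG_EH_REGION (const_int 1) ...))

   control_flow_insn_p returns true once nonlocal goto handlers exist
   or the call can throw internally, so such a call must be the last
   insn of its basic block.  */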

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (rtx f)
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* A code label or barrier causes the current basic block to be
	 terminated at the previous real insn.  */
      if ((LABEL_P (insn) || BARRIER_P (insn))
	  && saw_insn)
	count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* A control flow insn causes the current basic block to be
	 terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit more smoothly when we don't
     have to check for the edge case of do-nothing functions with no
     basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
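
/* A worked example (illustrative only): for the insn stream

	insn A, jump B, label C, insn D, barrier, label E, insn F

   the jump ends the first block {A, B}, the barrier ends the second
   block {C, D}, and the final block {E, F} is counted when the scan
   runs off the end of the chain, giving a count of 3.  */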
\f
/* Create an edge between two basic blocks.  FLAGS are auxiliary
   information about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges to the EH handlers reachable from INSN.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}

/* States of basic blocks as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc.) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP	32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if
   we are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block);

  /* By the nature of the way these get numbered, the block following
     ENTRY_BLOCK_PTR is always the first block of the function.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
	{
	  sbitmap_zero (edge_cache);
	  if (update_p)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest != EXIT_BLOCK_PTR)
		  SET_BIT (edge_cache, e->dest->index);
	    }
	}

      if (LABEL_P (BB_HEAD (bb))
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    rtl_make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &tmp))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that
		 also contains the out-of-range target.  Scan for these
		 and add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      gcc_assert (JUMP_LABEL (insn));
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined
	 call and return, and so we need an edge to the exit block.  No
	 need to worry about EH edges, since we wouldn't have created the
	 sibling call in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  rtl_make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's
		 possible to tell that certain calls will not do a
		 nonlocal goto.  For example, if the nested functions
		 that do the nonlocal gotos do not have their addresses
		 taken, then only calls to those functions or to other
		 nested functions that use them could possibly do
		 nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR);
      if (e && e->flags & EDGE_FALLTHRU)
	insn = NULL;

      while (insn
	     && NOTE_P (insn)
	     && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn)
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  if (insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  /* EDGE_CACHE is a single sbitmap from sbitmap_alloc.  */
  if (edge_cache)
    sbitmap_free (edge_cache);
}
\f
/* Find all basic blocks of the function whose first insn is F.

   The block structures are created here; the edges, including those
   for computed gotos, are discovered by a later call to make_edges.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we
     did previously.  This is so that we see a NOTE_BASIC_BLOCK after
     we have closed out the previous block, so that it gets attached at
     the proper place.  Since this form should be equivalent to the
     previous, count_basic_blocks continues to use the old form as a
     check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((LABEL_P (insn) || BARRIER_P (insn))
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case CALL_INSN:
	case INSN:
	case BARRIER:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  gcc_assert (last_basic_block == n_basic_blocks);

  clear_aux_for_blocks ();
}


/* Find basic blocks of the current function.
   F is the first insn of the function.  */

void
find_basic_blocks (rtx f)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      basic_block_info = NULL;
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be
     allocated by find_basic_blocks_1, since we want to keep the
     structure pointers stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called
     find_basic_blocks exactly once, and thereafter we don't have a
     single long chain of instructions at all until close to the end of
     compilation when we actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  profile_status = PROFILE_ABSENT;

  /* Tell make_edges to examine every block for outgoing edges.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, BLOCK_NEW);

  /* Discover the edges of our CFG.  */
  make_edges (ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs
     between here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
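
/* A hedged usage sketch (not from this file): a pass that has
   rewritten the insn chain wholesale might rebuild the CFG with

	find_basic_blocks (get_insns ());
	cleanup_cfg (0);

   where get_insns and cleanup_cfg are assumed to come from the usual
   headers; passes with intact block structure should prefer the
   incremental find_many_sub_basic_blocks below.  */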
\f
/* Mark the basic block containing LABEL as used by a tablejump.  */

static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

/* Remove edges out of BB that do not lead to an entry of the dispatch
   table TABLE (EH and abnormal edges are kept).  */

static void
purge_dead_tablejump_edges (basic_block bb, rtx table)
{
  rtx insn = BB_END (bb), tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  if (GET_CODE (PATTERN (table)) == ADDR_VEC)
    vec = XVEC (PATTERN (table), 0);
  else
    vec = XVEC (PATTERN (table), 1);

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     add an edge if necessary.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
	SET_STATE (e->dest, FULL_STATE (e->dest)
		   & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  remove_edge (e);
	  continue;
	}
      ei_next (&ei);
    }
}

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb);
  rtx table;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    BB_END (bb) = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  BB_END (bb) = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence that
     terminates with a return and a barrier, or possibly by another
     sequence that does not behave like an ordinary jump, we need to
     take care to move the basic block boundary.  */
  if (flow_transfer_insn)
    BB_END (bb) = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges
     may be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split
     the basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}
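
/* An illustrative sketch of the splitting above (insn numbers
   invented): if a pass leaves

	insn 1, call_insn 2 (can throw), insn 3, code_label 4, insn 5

   inside one block, the scan splits once after the throwing call (the
   pending flow_transfer_insn) and once before the label, yielding
   three blocks that end at insns 2, 3 and 5 respectively.  */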

/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */
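
/* A worked example, assuming REG_BR_PROB_BASE is 10000 (its historical
   value): with b->count == 1000 and a REG_BR_PROB note of 4000, the
   branch edge gets probability 4000 and count
   (1000 * 4000 + 5000) / 10000 == 400, and the fallthru edge gets
   probability 6000 and count 1000 - 400 == 600.  */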

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
	{
	  probability = INTVAL (XEXP (note, 0));
	  e = BRANCH_EDGE (b);
	  e->probability = probability;
	  e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		      / REG_BR_PROB_BASE);
	  f = FALLTHRU_EDGE (b);
	  f->probability = REG_BR_PROB_BASE - probability;
	  f->count = b->count - e->count;
	  return;
	}
    }

  if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  guess_outgoing_edge_probabilities (b);
  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
}

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple
     (conditional) jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  We expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  if (profile_status != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
	edge e;
	edge_iterator ei;

	if (STATE (bb) == BLOCK_ORIGINAL)
	  continue;
	if (STATE (bb) == BLOCK_NEW)
	  {
	    bb->count = 0;
	    bb->frequency = 0;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		bb->count += e->count;
		bb->frequency += EDGE_FREQUENCY (e);
	      }
	  }

	compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
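
/* A hedged usage sketch for the function above: a pass that may have
   inserted control flow into any block can simply mark them all,

	sbitmap blocks = sbitmap_alloc (last_basic_block);
	sbitmap_ones (blocks);
	find_many_sub_basic_blocks (blocks);
	sbitmap_free (blocks);

   where sbitmap_ones and sbitmap_free are assumed from sbitmap.h;
   callers with better information set only the bits of the blocks
   they actually touched.  */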