/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "hashtab.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "except.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "timevar.h"
#include "sbitmap.h"

static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
              || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn)));

    case JUMP_INSN:
    case CALL_INSN:
    case INSN:
    case DEBUG_INSN:
      return true;

    case JUMP_TABLE_DATA:
    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if INSN may cause a control flow transfer, so it should be
   the last insn in its basic block.  */

bool
control_flow_insn_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
    case DEBUG_INSN:
      return false;

    case JUMP_INSN:
      return true;

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic block
         (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
           || find_reg_note (insn, REG_NORETURN, 0))
          && GET_CODE (PATTERN (insn)) != COND_EXEC)
        return true;

      /* Call insn may return to the nonlocal goto handler.  */
      if (can_nonlocal_goto (insn))
        return true;
      break;

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
          && XEXP (PATTERN (insn), 0) == const1_rtx)
        return true;
      if (!cfun->can_throw_non_call_exceptions)
        return false;
      break;

    case JUMP_TABLE_DATA:
    case BARRIER:
      /* It is nonsensical to reach here when looking for the end of a
         basic block, but before dead code is eliminated this may
         happen.  */
      return false;

    default:
      gcc_unreachable ();
    }

  return can_throw_internal (insn);
}


/* Create an edge from basic block SRC to the basic block containing LABEL.
   FLAGS are auxiliary information about the edge that is accumulated
   between calls.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception edges generated by INSN, going out of basic
   block SRC.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);

  if (lp)
    {
      rtx label = lp->landing_pad;

      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
        {
          gcc_assert (lp->post_landing_pad);
          label = label_rtx (lp->post_landing_pad);
        }

      make_label_edge (edge_cache, src, label,
                       EDGE_ABNORMAL | EDGE_EH
                       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

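/* The state is kept in the basic block's AUX field, so that field must not
   be used for anything else while find_many_sub_basic_blocks is running.  */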
#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP 32
#define FULL_STATE(BB) ((size_t) (BB)->aux)

/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if we
   are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* By nature of the way basic blocks get numbered, if MIN is the block
     immediately following the entry block, it needs a fallthru edge
     from the entry block.  */
  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx_insn *insn;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
        {
          bitmap_clear (edge_cache);
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                  bitmap_set_bit (edge_cache, e->dest->index);
            }
        }

      if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;
          rtx_jump_table_data *table;

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &table))
            {
              rtvec vec = table->get_labels ();
              int j;

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (e.g., ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)), 0);
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              for (rtx_insn_list *x = forced_labels; x; x = x->next ())
                make_label_edge (edge_cache, bb, x->insn (), EDGE_ABNORMAL);
            }

          /* A return creates an edge to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);

          /* Recognize asm goto and do the right thing.  */
          else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
            {
              int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
              for (i = 0; i < n; ++i)
                make_label_edge (edge_cache, bb,
                                 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
            }

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              gcc_assert (JUMP_LABEL (insn));
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined call
         and return, and so we need an edge to the exit block.  No need to
         worry about EH edges, since we wouldn't have created the sibling call
         in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          rtl_make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN)
            {
              if (can_nonlocal_goto (insn))
                {
                  /* ??? This could be made smarter: in some cases it's
                     possible to tell that certain calls will not do a
                     nonlocal goto.  For example, if the nested functions
                     that do the nonlocal gotos do not have their addresses
                     taken, then only calls to those functions or to other
                     nested functions that use them could possibly do
                     nonlocal gotos.  */
                  for (rtx_insn_list *x = nonlocal_goto_handler_labels;
                       x;
                       x = x->next ())
                    make_label_edge (edge_cache, bb, x->insn (),
                                     EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
                }

              if (flag_tm)
                {
                  rtx note;
                  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
                    if (REG_NOTE_KIND (note) == REG_TM)
                      make_label_edge (edge_cache, bb, XEXP (note, 0),
                                       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
                }
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
      if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;

      while (insn
             && NOTE_P (insn)
             && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn)
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                          EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_free (edge_cache);
}

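/* Note that the basic block containing LABEL is targeted by a tablejump,
   by ORing BLOCK_USED_BY_TABLEJUMP into its state.  */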
static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

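/* Remove from BB any outgoing edges that are no longer targeted by the jump
   table TABLE, leaving abnormal and EH edges alone.  */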
static void
purge_dead_tablejump_edges (basic_block bb, rtx_jump_table_data *table)
{
  rtx_insn *insn = BB_END (bb);
  rtx tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  vec = table->get_labels ();

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     mark the extra target as well.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
        SET_STATE (e->dest, FULL_STATE (e->dest)
                   & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          remove_edge (e);
          continue;
        }
      ei_next (&ei);
    }
}

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx_insn *insn = BB_HEAD (bb);
  rtx_insn *end = BB_END (bb), *x;
  rtx_jump_table_data *table;
  rtx_insn *flow_transfer_insn = NULL;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      if ((flow_transfer_insn || code == CODE_LABEL)
          && inside_basic_block_p (insn))
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          if (flow_transfer_insn)
            {
              BB_END (bb) = flow_transfer_insn;

              /* Clean up the bb field for the insns between the blocks.  */
              for (x = NEXT_INSN (flow_transfer_insn);
                   x != BB_HEAD (fallthru->dest);
                   x = NEXT_INSN (x))
                if (!BARRIER_P (x))
                  set_block_for_insn (x, NULL);
            }

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL;
          if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
        }
      else if (code == BARRIER)
        {
          /* __builtin_unreachable () may cause a barrier to be emitted in
             the middle of a BB.  We need to split it in the same manner as
             if the barrier were preceded by a control_flow_insn_p insn.  */
          if (!flow_transfer_insn)
            flow_transfer_insn = prev_nonnote_insn_bb (insn);
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminated
     by a return and a barrier, or possibly some other sequence that does
     not behave like an ordinary jump, we need to take care to move the
     basic block boundary.  */
  if (flow_transfer_insn)
    {
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      x = flow_transfer_insn;
      while (x != end)
        {
          x = NEXT_INSN (x);
          if (!BARRIER_P (x))
            set_block_for_insn (x, NULL);
        }
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup code on the fallthru edge, so the outgoing edges
     may be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}

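/* Edge probabilities here are on the REG_BR_PROB_BASE scale: a value of
   REG_BR_PROB_BASE means "always taken".  A REG_BR_PROB note on a
   conditional jump stores the probability of the branch edge on that
   same scale.  */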
/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
        {
          probability = XINT (note, 0);
          e = BRANCH_EDGE (b);
          e->probability = probability;
          e->count = apply_probability (b->count, probability);
          f = FALLTHRU_EDGE (b);
          f->probability = REG_BR_PROB_BASE - probability;
          f->count = b->count - e->count;
          return;
        }
      else
        {
          guess_outgoing_edge_probabilities (b);
        }
    }
  else if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  else
    {
      /* We rely on BBs with more than two successors to have sane probabilities
         and do not guess them here.  For BBs terminated by switch statements
         expanded to jump-table jumps, we have done the right thing during
         expansion.  For EH edges, we still guess the probabilities here.  */
      bool complex_edge = false;
      FOR_EACH_EDGE (e, ei, b->succs)
        if (e->flags & EDGE_COMPLEX)
          {
            complex_edge = true;
            break;
          }
      if (complex_edge)
        guess_outgoing_edge_probabilities (b);
    }

  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = apply_probability (b->count, e->probability);
}

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */
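/* A sketch of a typical caller (hypothetical code, not part of this file):
   allocate a bitmap of basic blocks, set the bits of the blocks whose
   contents were changed, and let this function split them and rebuild
   their outgoing edges.

       sbitmap blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (blocks);
       bitmap_set_bit (blocks, bb->index);
       find_many_sub_basic_blocks (blocks);
       sbitmap_free (blocks);  */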

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb,
               bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
        edge e;
        edge_iterator ei;

        if (STATE (bb) == BLOCK_ORIGINAL)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
            bb->count = 0;
            bb->frequency = 0;
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                bb->count += e->count;
                bb->frequency += EDGE_FREQUENCY (e);
              }
          }

        compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb, 0);
}