]>
Commit | Line | Data |
---|---|---|
402209ff JH |
1 | /* Control flow graph building code for GNU compiler. |
2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, | |
88462c42 | 3 | 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. |
402209ff JH |
4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING. If not, write to the Free | |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
21 | ||
22 | /* find_basic_blocks divides the current function's rtl into basic | |
23 | blocks and constructs the CFG. The blocks are recorded in the | |
24 | basic_block_info array; the CFG exists in the edge structures | |
25 | referenced by the blocks. | |
26 | ||
27 | find_basic_blocks also finds any unreachable loops and deletes them. | |
28 | ||
29 | Available functionality: | |
30 | - CFG construction | |
31 | find_basic_blocks | |
32 | - Local CFG construction | |
4891442b | 33 | find_sub_basic_blocks */ |
402209ff JH |
34 | \f |
35 | #include "config.h" | |
36 | #include "system.h" | |
4977bab6 ZW |
37 | #include "coretypes.h" |
38 | #include "tm.h" | |
402209ff JH |
39 | #include "tree.h" |
40 | #include "rtl.h" | |
41 | #include "hard-reg-set.h" | |
42 | #include "basic-block.h" | |
43 | #include "regs.h" | |
44 | #include "flags.h" | |
45 | #include "output.h" | |
46 | #include "function.h" | |
47 | #include "except.h" | |
48 | #include "toplev.h" | |
49 | #include "timevar.h" | |
4891442b | 50 | |
d329e058 AJ |
51 | static int count_basic_blocks (rtx); |
52 | static void find_basic_blocks_1 (rtx); | |
53 | static rtx find_label_refs (rtx, rtx); | |
54 | static void make_edges (rtx, basic_block, basic_block, int); | |
55 | static void make_label_edge (sbitmap *, basic_block, rtx, int); | |
d329e058 AJ |
56 | static void find_bb_boundaries (basic_block); |
57 | static void compute_outgoing_frequencies (basic_block); | |
4891442b | 58 | \f |
9cd56be1 JH |
59 | /* Return true if insn is something that should be contained inside basic |
60 | block. */ | |
61 | ||
8f62128d | 62 | bool |
d329e058 | 63 | inside_basic_block_p (rtx insn) |
9cd56be1 JH |
64 | { |
65 | switch (GET_CODE (insn)) | |
66 | { | |
67 | case CODE_LABEL: | |
68 | /* Avoid creating of basic block for jumptables. */ | |
4891442b RK |
69 | return (NEXT_INSN (insn) == 0 |
70 | || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN | |
71 | || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC | |
72 | && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC)); | |
9cd56be1 JH |
73 | |
74 | case JUMP_INSN: | |
4891442b RK |
75 | return (GET_CODE (PATTERN (insn)) != ADDR_VEC |
76 | && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC); | |
9cd56be1 JH |
77 | |
78 | case CALL_INSN: | |
79 | case INSN: | |
80 | return true; | |
81 | ||
82 | case BARRIER: | |
83 | case NOTE: | |
84 | return false; | |
85 | ||
86 | default: | |
87 | abort (); | |
88 | } | |
89 | } | |
90 | ||
4891442b RK |
91 | /* Return true if INSN may cause control flow transfer, so it should be last in |
92 | the basic block. */ | |
9cd56be1 | 93 | |
4a69cf79 | 94 | bool |
d329e058 | 95 | control_flow_insn_p (rtx insn) |
9cd56be1 JH |
96 | { |
97 | rtx note; | |
4891442b | 98 | |
9cd56be1 JH |
99 | switch (GET_CODE (insn)) |
100 | { | |
f87c27b4 KH |
101 | case NOTE: |
102 | case CODE_LABEL: | |
103 | return false; | |
104 | ||
105 | case JUMP_INSN: | |
106 | /* Jump insn always causes control transfer except for tablejumps. */ | |
107 | return (GET_CODE (PATTERN (insn)) != ADDR_VEC | |
108 | && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC); | |
109 | ||
110 | case CALL_INSN: | |
111 | /* Call insn may return to the nonlocal goto handler. */ | |
112 | return ((nonlocal_goto_handler_labels | |
113 | && (0 == (note = find_reg_note (insn, REG_EH_REGION, | |
114 | NULL_RTX)) | |
115 | || INTVAL (XEXP (note, 0)) >= 0)) | |
116 | /* Or may trap. */ | |
117 | || can_throw_internal (insn)); | |
118 | ||
119 | case INSN: | |
120 | return (flag_non_call_exceptions && can_throw_internal (insn)); | |
121 | ||
122 | case BARRIER: | |
a98ebe2e | 123 | /* It is nonsense to reach barrier when looking for the |
f87c27b4 KH |
124 | end of basic block, but before dead code is eliminated |
125 | this may happen. */ | |
126 | return false; | |
127 | ||
128 | default: | |
129 | abort (); | |
9cd56be1 JH |
130 | } |
131 | } | |
402209ff JH |
132 | |
133 | /* Count the basic blocks of the function. */ | |
134 | ||
135 | static int | |
d329e058 | 136 | count_basic_blocks (rtx f) |
402209ff | 137 | { |
b3694847 | 138 | int count = 0; |
9cd56be1 JH |
139 | bool saw_insn = false; |
140 | rtx insn; | |
402209ff | 141 | |
402209ff JH |
142 | for (insn = f; insn; insn = NEXT_INSN (insn)) |
143 | { | |
95bd1dd7 | 144 | /* Code labels and barriers causes current basic block to be |
9cd56be1 | 145 | terminated at previous real insn. */ |
9cd56be1 JH |
146 | if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER) |
147 | && saw_insn) | |
148 | count++, saw_insn = false; | |
402209ff | 149 | |
9cd56be1 JH |
150 | /* Start basic block if needed. */ |
151 | if (!saw_insn && inside_basic_block_p (insn)) | |
152 | saw_insn = true; | |
402209ff | 153 | |
9cd56be1 JH |
154 | /* Control flow insn causes current basic block to be terminated. */ |
155 | if (saw_insn && control_flow_insn_p (insn)) | |
156 | count++, saw_insn = false; | |
402209ff | 157 | } |
4891442b | 158 | |
9cd56be1 JH |
159 | if (saw_insn) |
160 | count++; | |
402209ff JH |
161 | |
162 | /* The rest of the compiler works a bit smoother when we don't have to | |
163 | check for the edge case of do-nothing functions with no basic blocks. */ | |
164 | if (count == 0) | |
165 | { | |
166 | emit_insn (gen_rtx_USE (VOIDmode, const0_rtx)); | |
167 | count = 1; | |
168 | } | |
169 | ||
170 | return count; | |
171 | } | |
172 | ||
173 | /* Scan a list of insns for labels referred to other than by jumps. | |
174 | This is used to scan the alternatives of a call placeholder. */ | |
4891442b | 175 | |
402209ff | 176 | static rtx |
d329e058 | 177 | find_label_refs (rtx f, rtx lvl) |
402209ff JH |
178 | { |
179 | rtx insn; | |
180 | ||
181 | for (insn = f; insn; insn = NEXT_INSN (insn)) | |
182 | if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN) | |
183 | { | |
184 | rtx note; | |
185 | ||
186 | /* Make a list of all labels referred to other than by jumps | |
187 | (which just don't have the REG_LABEL notes). | |
188 | ||
189 | Make a special exception for labels followed by an ADDR*VEC, | |
190 | as this would be a part of the tablejump setup code. | |
191 | ||
192 | Make a special exception to registers loaded with label | |
193 | values just before jump insns that use them. */ | |
194 | ||
195 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
196 | if (REG_NOTE_KIND (note) == REG_LABEL) | |
197 | { | |
198 | rtx lab = XEXP (note, 0), next; | |
199 | ||
200 | if ((next = next_nonnote_insn (lab)) != NULL | |
f87c27b4 KH |
201 | && GET_CODE (next) == JUMP_INSN |
202 | && (GET_CODE (PATTERN (next)) == ADDR_VEC | |
203 | || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)) | |
402209ff JH |
204 | ; |
205 | else if (GET_CODE (lab) == NOTE) | |
206 | ; | |
207 | else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN | |
208 | && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab)) | |
209 | ; | |
210 | else | |
211 | lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl); | |
212 | } | |
213 | } | |
214 | ||
215 | return lvl; | |
216 | } | |
217 | \f | |
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from basic block SRC to the block holding LABEL, with
   edge flags FLAGS.  EDGE_CACHE, when non-null, is the duplicate-edge
   bitmap passed through to cached_make_edge.  */

static void
make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
{
  /* Callers must pass an actual CODE_LABEL.  */
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
239 | ||
240 | /* Create the edges generated by INSN in REGION. */ | |
241 | ||
12c3874e | 242 | void |
d329e058 | 243 | make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn) |
402209ff | 244 | { |
4891442b | 245 | int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0; |
402209ff JH |
246 | rtx handlers, i; |
247 | ||
248 | handlers = reachable_handlers (insn); | |
249 | ||
250 | for (i = handlers; i; i = XEXP (i, 1)) | |
251 | make_label_edge (edge_cache, src, XEXP (i, 0), | |
252 | EDGE_ABNORMAL | EDGE_EH | is_call); | |
253 | ||
254 | free_INSN_LIST_list (&handlers); | |
255 | } | |
4891442b | 256 | |
402209ff JH |
/* Identify the edges between basic blocks MIN to MAX.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.

   UPDATE_P is nonzero when edges already exist for the blocks in the
   range and the existing successor sets must be pre-seeded into the
   duplicate-edge cache.  */

static void
make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      /* One bitmap row per block, indexed by destination block.  */
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    /* Pre-populate the cache with already-existing edges so
	       they are not recreated below.  */
	    for (e = bb->succ; e ; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
		      EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Alternate entry points reach this block straight from ENTRY.  */
      if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &tmp))
	    {
	      rtvec vec;
	      int j;

	      /* ADDR_DIFF_VEC keeps its label vector in operand 1.  */
	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      /* One edge per table entry.  */
	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block: skip any
	 notes other than the basic block note that heads it.  */
      insn = NEXT_INSN (insn);
      while (insn
	     && GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  if (force_fallthru || insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
443 | \f | |
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;	/* NOTE_INSN_BASIC_BLOCK for current block.  */
  rtx lvl = NULL_RTX;		/* Accumulates label_value_list.  */
  rtx trll = NULL_RTX;		/* Accumulates tail_recursion_label_list.  */
  rtx head = NULL_RTX;		/* First insn of the block being built.  */
  rtx end = NULL_RTX;		/* Last insn of the block being built.  */
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      /* A label or barrier closes the block accumulated so far.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      /* Extend (or start) the current block with this insn.  */
      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      /* A control flow insn ends the block at this insn.  */
      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.

	     NOTE(review): this scan duplicates find_label_refs above;
	     the two must be kept in sync.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  /* Close out a block still open at the end of the chain; otherwise
     discard a dangling basic block note.  */
  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  /* Cross-check against count_basic_blocks's prediction.  */
  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
  clear_aux_for_blocks ();
}
582 | ||
583 | ||
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  NREGS and FILE are both unused here and kept for the
   callers' sake.  */

void
find_basic_blocks (rtx f, int nregs ATTRIBUTE_UNUSED,
		   FILE *file ATTRIBUTE_UNUSED)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  /* Reset global CFG bookkeeping; entry links directly to exit until
     find_basic_blocks_1 threads the real blocks in between.  */
  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
639 | \f | |
b932f770 | 640 | /* State of basic block as seen by find_sub_basic_blocks. */ |
4891442b RK |
641 | enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT}; |
642 | ||
643 | #define STATE(BB) (enum state) ((size_t) (BB)->aux) | |
644 | #define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE)) | |
b932f770 JH |
645 | |
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the progress.  Splitting happens at
   interior code labels and after control flow insns.  */

static void
find_bb_boundaries (basic_block bb)
{
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb);
  /* Most recent control flow insn not yet followed by a split point.  */
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  /* Single-insn block: nothing to split.  */
  if (insn == BB_END (bb))
    return;

  /* The block's own label is not an interior boundary.  */
  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On code label, split current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  /* Trim the first half back to the last control flow insn,
	     dropping any dead code after it.  */
	  if (flow_transfer_insn)
	    BB_END (bb) = flow_transfer_insn;

	  bb = fallthru->dest;
	  /* The edges will be recreated by make_edges later.  */
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  BB_END (bb) = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case expander replaced normal insn by sequence terminating by
     return and barrier, or possibly other sequence not behaving like
     ordinary jump, we need to take care and move basic block boundary.  */
  if (flow_transfer_insn)
    BB_END (bb) = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by conditional jump
     followed by cleanup at fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}
711 | ||
712 | /* Assume that frequency of basic block B is known. Compute frequencies | |
713 | and probabilities of outgoing edges. */ | |
714 | ||
715 | static void | |
d329e058 | 716 | compute_outgoing_frequencies (basic_block b) |
b932f770 JH |
717 | { |
718 | edge e, f; | |
4891442b | 719 | |
b932f770 JH |
720 | if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next) |
721 | { | |
a813c111 | 722 | rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL); |
b932f770 JH |
723 | int probability; |
724 | ||
725 | if (!note) | |
726 | return; | |
4891442b | 727 | |
441c9a12 | 728 | probability = INTVAL (XEXP (note, 0)); |
b932f770 JH |
729 | e = BRANCH_EDGE (b); |
730 | e->probability = probability; | |
731 | e->count = ((b->count * probability + REG_BR_PROB_BASE / 2) | |
732 | / REG_BR_PROB_BASE); | |
733 | f = FALLTHRU_EDGE (b); | |
734 | f->probability = REG_BR_PROB_BASE - probability; | |
735 | f->count = b->count - e->count; | |
736 | } | |
4891442b | 737 | |
b932f770 JH |
738 | if (b->succ && !b->succ->succ_next) |
739 | { | |
740 | e = b->succ; | |
741 | e->probability = REG_BR_PROB_BASE; | |
742 | e->count = b->count; | |
743 | } | |
744 | } | |
745 | ||
/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  BLOCKS is a bitmap of the
   blocks whose interiors must be rescanned for new boundaries.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  /* Tag every block with whether it needs splitting; blocks created
     during splitting keep the default BLOCK_NEW (aux == 0).  */
  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  /* Locate the first non-original block ...  */
  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  /* ... and the last, bounding the range to rewire.  */
  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expect simple (conditional)
     jumps at the end of each new basic blocks.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      if (STATE (bb) == BLOCK_NEW)
	{
	  /* Freshly created block: derive its counts from the incoming
	     edges.  */
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  /* Clear the aux-stored states again.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
800 | ||
/* Like find_many_sub_basic_blocks, but for single basic block BB only.  */

void
find_sub_basic_blocks (basic_block bb)
{
  basic_block min, max, b;
  /* Remember the old successor block; after splitting, everything from
     BB up to NEXT's predecessor belongs to the rescanned range.  */
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expect simple (conditional)
     jumps at the end of each new basic blocks.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;

      if (b != min)
	{
	  /* Block created by the split: derive its counts from the
	     incoming edges.  */
	  b->count = 0;
	  b->frequency = 0;
	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}