/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "cfghooks.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "except.h"
#include "stmt.h"

static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);
9cd56be1 JH |
40 | /* Return true if insn is something that should be contained inside basic |
41 | block. */ | |
42 | ||
8f62128d | 43 | bool |
eefc72ed | 44 | inside_basic_block_p (const rtx_insn *insn) |
9cd56be1 JH |
45 | { |
46 | switch (GET_CODE (insn)) | |
47 | { | |
48 | case CODE_LABEL: | |
49 | /* Avoid creating of basic block for jumptables. */ | |
4891442b | 50 | return (NEXT_INSN (insn) == 0 |
6388c738 | 51 | || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn))); |
9cd56be1 JH |
52 | |
53 | case JUMP_INSN: | |
9cd56be1 JH |
54 | case CALL_INSN: |
55 | case INSN: | |
b5b8b0ac | 56 | case DEBUG_INSN: |
9cd56be1 JH |
57 | return true; |
58 | ||
39718607 | 59 | case JUMP_TABLE_DATA: |
9cd56be1 JH |
60 | case BARRIER: |
61 | case NOTE: | |
62 | return false; | |
63 | ||
64 | default: | |
341c100f | 65 | gcc_unreachable (); |
9cd56be1 JH |
66 | } |
67 | } | |
68 | ||
4891442b RK |
/* Return true if INSN may cause control flow transfer, so it should be last in
   the basic block.  */

bool
control_flow_insn_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
    case DEBUG_INSN:
      /* Pure bookkeeping; never transfers control.  */
      return false;

    case JUMP_INSN:
      return true;

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic blocks
	 (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
	   || find_reg_note (insn, REG_NORETURN, 0))
	  && GET_CODE (PATTERN (insn)) != COND_EXEC)
	return true;

      /* Call insn may return to the nonlocal goto handler.  */
      if (can_nonlocal_goto (insn))
	return true;
      /* Fall through to the can_throw_internal check below.  */
      break;

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
	  && XEXP (PATTERN (insn), 0) == const1_rtx)
	return true;
      /* A plain insn can only transfer control by throwing, which is
	 only possible with -fnon-call-exceptions.  */
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      break;

    case JUMP_TABLE_DATA:
    case BARRIER:
      /* It is nonsense to reach this when looking for the
	 end of basic block, but before dead code is eliminated
	 this may happen.  */
      return false;

    default:
      gcc_unreachable ();
    }

  /* Reached only for calls, and for plain insns under
     -fnon-call-exceptions: control may leave via an EH edge.  */
  return can_throw_internal (insn);
}
402209ff | 120 | |
402209ff JH |
121 | \f |
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from SRC to the basic block containing LABEL, with
   edge flags FLAGS.  EDGE_CACHE, when non-null, is passed through to
   cached_make_edge to suppress duplicate edges cheaply.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
142 | ||
/* Create the edges generated by INSN in REGION.  EDGE_CACHE is the
   duplicate-suppression bitmap (may be null) and SRC the block that
   contains INSN.  No edge is made if INSN has no EH landing pad.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);

  if (lp)
    {
      rtx_insn *label = lp->landing_pad;

      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
	{
	  gcc_assert (lp->post_landing_pad);
	  label = label_rtx (lp->post_landing_pad);
	}

      /* EH edges are abnormal; additionally tag call-originated ones.  */
      make_label_edge (edge_cache, src, label,
		       EDGE_ABNORMAL | EDGE_EH
		       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
}
4891442b | 166 | |
55b18c19 KH |
/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

/* The state is stashed in each block's otherwise-unused aux field.  */
#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP 32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
191 | ||
55b18c19 KH |
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating CFG and zero if we
   are building CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (!vec_safe_is_empty (forced_labels)
      || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx_insn *insn;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
	{
	  bitmap_clear (edge_cache);
	  if (update_p)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		  bitmap_set_bit (edge_cache, e->dest->index);
	    }
	}

      /* Alternate entry points receive an edge from the function entry.  */
      if (LABEL_P (BB_HEAD (bb))
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;
	  rtx_jump_table_data *table;

	  /* Recognize a non-local goto as a branch outside the
	     current function.  No edge is made.  */
	  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &table))
	    {
	      rtvec vec = table->get_labels ();
	      int j;

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 label_ref_label (XEXP (SET_SRC (tmp), 2)), 0);
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      /* NOTE: this INSN deliberately shadows the outer one; it
		 walks the forced_labels vector, not the insn stream.  */
	      rtx_insn *insn;
	      unsigned int i;
	      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
		make_label_edge (edge_cache, bb, insn, EDGE_ABNORMAL);
	    }

	  /* Return jumps create an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);

	  /* Recognize asm goto and do the right thing.  */
	  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
	    {
	      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
	      for (i = 0; i < n; ++i)
		make_label_edge (edge_cache, bb,
				 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
	    }

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      gcc_assert (JUMP_LABEL (insn));
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  rtl_make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN)
	    {
	      if (can_nonlocal_goto (insn))
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.  For example, if the nested functions
		     that do the nonlocal gotos do not have their addresses
		     taken, then only calls to those functions or to other
		     nested functions that use them could possibly do
		     nonlocal gotos.  */
		  for (rtx_insn_list *x = nonlocal_goto_handler_labels;
		       x;
		       x = x->next ())
		    make_label_edge (edge_cache, bb, x->insn (),
				     EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}

	      /* Transactional memory: a call may transfer to abort/over
		 labels recorded in REG_TM notes.  */
	      if (flag_tm)
		{
		  rtx note;
		  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		    if (REG_NOTE_KIND (note) == REG_TM)
		      make_label_edge (edge_cache, bb, XEXP (note, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
      if (e && e->flags & EDGE_FALLTHRU)
	insn = NULL;

      /* Skip over notes that may sit between the block end and the
	 next block's NOTE_INSN_BASIC_BLOCK.  */
      while (insn
	     && NOTE_P (insn)
	     && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn)
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  if (insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_free (edge_cache);
}
382 | \f | |
0210ae14 JJ |
383 | static void |
384 | mark_tablejump_edge (rtx label) | |
385 | { | |
386 | basic_block bb; | |
387 | ||
388 | gcc_assert (LABEL_P (label)); | |
389 | /* See comment in make_label_edge. */ | |
390 | if (INSN_UID (label) == 0) | |
391 | return; | |
392 | bb = BLOCK_FOR_INSN (label); | |
393 | SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP); | |
394 | } | |
395 | ||
/* BB ends in the tablejump described by TABLE.  Mark every block that
   the jump table (and any ARM-style out-of-range branch) can reach,
   then delete BB's normal outgoing edges whose destination was not
   marked; abnormal and EH edges are kept.  */

static void
purge_dead_tablejump_edges (basic_block bb, rtx_jump_table_data *table)
{
  rtx_insn *insn = BB_END (bb);
  rtx tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  vec = table->get_labels ();

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (eg, ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     add an edge if necessary.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (label_ref_label (XEXP (SET_SRC (tmp), 2)));

  /* Walk the successor edges, clearing the mark on kept destinations
     and removing unmarked normal edges.  ei is only advanced when the
     current edge survives, since remove_edge shifts the vector.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
	SET_STATE (e->dest, FULL_STATE (e->dest)
		   & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  remove_edge (e);
	  continue;
	}
      ei_next (&ei);
    }
}
433 | ||
b932f770 JH |
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the progress.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx_insn *insn = BB_HEAD (bb);
  rtx_insn *end = BB_END (bb), *x;
  rtx_jump_table_data *table;
  /* Last control-flow insn seen that has not yet ended a block.  */
  rtx_insn *flow_transfer_insn = NULL;
  /* First debug insn seen after flow_transfer_insn, if any.  */
  rtx_insn *debug_insn = NULL;
  edge fallthru = NULL;
  bool skip_purge;
  bool seen_note_after_debug = false;

  if (insn == end)
    return;

  if (DEBUG_INSN_P (insn) || DEBUG_INSN_P (end))
    {
      /* Check whether, without debug insns, the insn==end test above
	 would have caused us to return immediately, and behave the
	 same way even with debug insns.  If we don't do this, debug
	 insns could cause us to purge dead edges at different times,
	 which could in turn change the cfg and affect codegen
	 decisions in subtle but undesirable ways.  */
      while (insn != end && DEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);
      rtx_insn *e = end;
      while (insn != e && DEBUG_INSN_P (e))
	e = PREV_INSN (e);
      if (insn == e)
	{
	  /* If there are debug insns after a single insn that is a
	     control flow insn in the block, we'd have left right
	     away, but we should clean up the debug insns after the
	     control flow insn, because they can't remain in the same
	     block.  So, do the debug insn cleaning up, but then bail
	     out without purging dead edges as we would if the debug
	     insns hadn't been there.  */
	  if (e != end && !DEBUG_INSN_P (e) && control_flow_insn_p (e))
	    {
	      skip_purge = true;
	      flow_transfer_insn = e;
	      goto clean_up_debug_after_control_flow;
	    }
	  return;
	}
    }

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == DEBUG_INSN)
	{
	  /* Remember only the first debug insn after a pending control
	     flow transfer; the split point may be moved before it.  */
	  if (flow_transfer_insn && !debug_insn)
	    {
	      debug_insn = insn;
	      seen_note_after_debug = false;
	    }
	}
      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      else if ((flow_transfer_insn || code == CODE_LABEL)
	       && inside_basic_block_p (insn))
	{
	  rtx_insn *prev = PREV_INSN (insn);

	  /* If the first non-debug inside_basic_block_p insn after a control
	     flow transfer is not a label, split the block before the debug
	     insn instead of before the non-debug insn, so that the debug
	     insns are not lost.  */
	  if (debug_insn && code != CODE_LABEL && code != BARRIER)
	    {
	      prev = PREV_INSN (debug_insn);
	      if (seen_note_after_debug)
		{
		  /* Though, if there are NOTEs intermixed with DEBUG_INSNs,
		     move the NOTEs before the DEBUG_INSNs and split after
		     the last NOTE.  */
		  rtx_insn *first = NULL, *last = NULL;
		  for (x = debug_insn; x != insn; x = NEXT_INSN (x))
		    {
		      if (NOTE_P (x))
			{
			  if (first == NULL)
			    first = x;
			  last = x;
			}
		      else
			{
			  gcc_assert (DEBUG_INSN_P (x));
			  if (first)
			    {
			      reorder_insns_nobb (first, last, prev);
			      prev = last;
			      first = last = NULL;
			    }
			}
		    }
		  if (first)
		    {
		      reorder_insns_nobb (first, last, prev);
		      prev = last;
		    }
		}
	    }
	  fallthru = split_block (bb, prev);
	  if (flow_transfer_insn)
	    {
	      BB_END (bb) = flow_transfer_insn;

	      rtx_insn *next;
	      /* Clean up the bb field for the insns between the blocks.  */
	      for (x = NEXT_INSN (flow_transfer_insn);
		   x != BB_HEAD (fallthru->dest);
		   x = next)
		{
		  next = NEXT_INSN (x);
		  /* Debug insns should not be in between basic blocks,
		     drop them on the floor.  */
		  if (DEBUG_INSN_P (x))
		    delete_insn (x);
		  else if (!BARRIER_P (x))
		    set_block_for_insn (x, NULL);
		}
	    }

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  /* BB is unreachable at this point - we need to determine its profile
	     once edges are built.  */
	  bb->count = profile_count::uninitialized ();
	  flow_transfer_insn = NULL;
	  debug_insn = NULL;
	  if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
	}
      else if (code == BARRIER)
	{
	  /* __builtin_unreachable () may cause a barrier to be emitted in
	     the middle of a BB.  We need to split it in the same manner as
	     if the barrier were preceded by a control_flow_insn_p insn.  */
	  if (!flow_transfer_insn)
	    flow_transfer_insn = prev_nonnote_nondebug_insn_bb (insn);
	  debug_insn = NULL;
	}
      else if (debug_insn)
	{
	  if (code == NOTE)
	    seen_note_after_debug = true;
	  else
	    /* Jump tables.  */
	    debug_insn = NULL;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case expander replaced normal insn by sequence terminating by
     return and barrier, or possibly other sequence not behaving like
     ordinary jump, we need to take care and move basic block boundary.  */
  if (flow_transfer_insn && flow_transfer_insn != end)
    {
      skip_purge = false;

    clean_up_debug_after_control_flow:
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      rtx_insn *next;
      for (x = NEXT_INSN (flow_transfer_insn); ; x = next)
	{
	  next = NEXT_INSN (x);
	  /* Debug insns should not be in between basic blocks,
	     drop them on the floor.  */
	  if (DEBUG_INSN_P (x))
	    delete_insn (x);
	  else if (!BARRIER_P (x))
	    set_block_for_insn (x, NULL);
	  if (x == end)
	    break;
	}

      if (skip_purge)
	return;
    }

  /* We've possibly replaced the conditional jump by conditional jump
     followed by cleanup at fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejump's, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}
642 | ||
/* Assume that frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      /* A conditional jump: take the probability from the REG_BR_PROB
	 note on the jump if present, otherwise guess it.  */
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
	{
	  probability = XINT (note, 0);
	  e = BRANCH_EDGE (b);
	  e->probability
	    = profile_probability::from_reg_br_prob_note (probability);
	  f = FALLTHRU_EDGE (b);
	  /* The fallthru edge gets the complementary probability.  */
	  f->probability = e->probability.invert ();
	  return;
	}
      else
	{
	  guess_outgoing_edge_probabilities (b);
	}
    }
  else if (single_succ_p (b))
    {
      /* A single successor is always taken.  */
      e = single_succ_edge (b);
      e->probability = profile_probability::always ();
      return;
    }
  else
    {
      /* We rely on BBs with more than two successors to have sane probabilities
	 and do not guess them here.  For BBs terminated by switch statements
	 expanded to jump-table jump, we have done the right thing during
	 expansion.  For EH edges, we still guess the probabilities here.  */
      bool complex_edge = false;
      FOR_EACH_EDGE (e, ei, b->succs)
	if (e->flags & EDGE_COMPLEX)
	  {
	    complex_edge = true;
	    break;
	  }
      if (complex_edge)
	guess_outgoing_edge_probabilities (b);
    }
}
695 | ||
55b18c19 KH |
/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;
  bool found = false;
  /* Per-block successor counts recorded before splitting; used below to
     detect blocks whose outgoing edges did not change.  */
  auto_vec<unsigned int> n_succs;
  n_succs.safe_grow_cleared (last_basic_block_for_fn (cfun), true);

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb,
	       bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      {
	int n = last_basic_block_for_fn (cfun);
	unsigned int ns = EDGE_COUNT (bb->succs);

	find_bb_boundaries (bb);
	/* Record the successor count only if splitting changed nothing.  */
	if (n == last_basic_block_for_fn (cfun) && ns == EDGE_COUNT (bb->succs))
	  n_succs[bb->index] = EDGE_COUNT (bb->succs);
      }

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) != BLOCK_ORIGINAL)
      {
	found = true;
	break;
      }

  if (!found)
    return;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic blocks.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
	edge e;
	edge_iterator ei;

	if (STATE (bb) == BLOCK_ORIGINAL)
	  continue;
	if (STATE (bb) == BLOCK_NEW)
	  {
	    /* A freshly split block: derive its count from the counts of
	       its incoming edges.  */
	    bool initialized_src = false, uninitialized_src = false;
	    bb->count = profile_count::zero ();
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		if (e->count ().initialized_p ())
		  {
		    bb->count += e->count ();
		    initialized_src = true;
		  }
		else
		  uninitialized_src = true;
	      }
	    /* When some edges are missing with read profile, this is
	       most likely because RTL expansion introduced loop.
	       When profile is guessed we may have BB that is reachable
	       from unlikely path as well as from normal path.

	       TODO: We should handle loops created during BB expansion
	       correctly here.  For now we assume all those loop to cycle
	       precisely once.  */
	    if (!initialized_src
		|| (uninitialized_src
		    && profile_status_for_fn (cfun) < PROFILE_GUESSED))
	      bb->count = profile_count::uninitialized ();
	  }
	/* If nothing changed, there is no need to create new BBs.  */
	else if (EDGE_COUNT (bb->succs) == n_succs[bb->index])
	  {
	    /* In rare occasions RTL expansion might have mistakenly assigned
	       a probabilities different from what is in CFG.  This happens
	       when we try to split branch to two but optimize out the
	       second branch during the way.  See PR81030.  */
	    if (JUMP_P (BB_END (bb)) && any_condjump_p (BB_END (bb))
		&& EDGE_COUNT (bb->succs) >= 2)
	      update_br_prob_note (bb);
	    continue;
	  }

	compute_outgoing_frequencies (bb);
      }

  /* Clear the aux-field state so later passes find it clean.  */
  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb, 0);
}