/* CFG cleanup for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "except.h"
#include "cfgloop.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;
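
/* For instance, when cleanup_control_expr_graph below folds away the
   control statement at the end of a block, it sets the block's index
   in this bitmap; the worklist loop in cleanup_tree_cfg_1 then keeps
   revisiting recorded blocks until no bits remain set.  */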

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (VEC(edge,gc) *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}


/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree lhs = gimple_cond_lhs (stmt);
            tree rhs = gimple_cond_rhs (stmt);
            /* For conditions try harder and lookup single-argument
               PHI nodes.  Only do so from the same basic-block though
               as other basic-blocks may be dead already.  */
            if (TREE_CODE (lhs) == SSA_NAME
                && !name_registered_for_update_p (lhs))
              {
                gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
                if (gimple_code (def_stmt) == GIMPLE_PHI
                    && gimple_phi_num_args (def_stmt) == 1
                    && gimple_bb (def_stmt) == gimple_bb (stmt)
                    && (TREE_CODE (PHI_ARG_DEF (def_stmt, 0)) != SSA_NAME
                        || !name_registered_for_update_p (PHI_ARG_DEF (def_stmt,
                                                                       0))))
                  lhs = PHI_ARG_DEF (def_stmt, 0);
              }
            if (TREE_CODE (rhs) == SSA_NAME
                && !name_registered_for_update_p (rhs))
              {
                gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
                if (gimple_code (def_stmt) == GIMPLE_PHI
                    && gimple_phi_num_args (def_stmt) == 1
                    && gimple_bb (def_stmt) == gimple_bb (stmt)
                    && (TREE_CODE (PHI_ARG_DEF (def_stmt, 0)) != SSA_NAME
                        || !name_registered_for_update_p (PHI_ARG_DEF (def_stmt,
                                                                       0))))
                  rhs = PHI_ARG_DEF (def_stmt, 0);
              }
            val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                   boolean_type_node, lhs, rhs);
            break;
          }

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
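
/* As a concrete example of the above, a condition that folds to a
   constant once single-argument PHIs in the same block are looked
   through,

     # x_1 = PHI <0(3)>
     if (x_1 != 0) goto <L1>; else goto <L2>;

   makes find_taken_edge return the edge to <L2>; the edge to <L1> and
   any blocks dominated by it are removed, the GIMPLE_COND is deleted,
   and the surviving edge becomes a plain fallthru edge carrying the
   combined probability and count.  (Block and SSA name numbers here
   are purely illustrative.)  */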

/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}
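
/* For instance, a computed goto whose destination operand turns out to
   be the address of a specific label (an ADDR_EXPR of a LABEL_DECL),

     goto &<L5>;

   is resolved by the GIMPLE_GOTO case above: edges to blocks other
   than the one holding <L5> are removed, the surviving edge loses
   EDGE_ABNORMAL and gains EDGE_FALLTHRU, and the goto statement itself
   is deleted because the CFG now encodes the same information.  (The
   dump syntax shown is only a sketch.)  */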

/* Return true if basic block BB does nothing except pass control
   flow to another block, and we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (!single_succ_p (bb)
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR);

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }
  return true;
}
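
/* Under the above rules a forwarder block is essentially empty, e.g.

     <bb 4>:
     goto <bb 7>;

   possibly containing labels and debug statements (and, when
   PHI_WANTED is true, PHI nodes), with a single normal successor that
   is neither the block itself nor EXIT_BLOCK_PTR.  (Block numbers are
   illustrative.)  */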

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}
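
/* For example, given the PHI node

     x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>

   phi_alternatives_equal returns true for the edge pair (E1, E2) and
   false for (E1, E3).  */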

/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at
     out-of-ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and the rest of the exception handling
     code does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              add_phi_arg (phi, gimple_phi_arg_def (phi, succ->dest_idx), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
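
/* A sketch of the transformation: with <bb 3> a forwarder into <bb 4>,

     <bb 3>:                        <bb 4>:
     goto <bb 4>;                   # x_1 = PHI <x_2(3), x_3(5)>

   each predecessor edge of <bb 3> is redirected straight to <bb 4>,
   the PHI receives the argument x_2 (the value that flowed through the
   forwarder) for every redirected edge, and <bb 3> is deleted.  (Block
   and SSA name numbers are illustrative.)  */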

/* STMT is a call that has been discovered noreturn.  Fix up the CFG
   and remove LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    split_block (bb, stmt);

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is an LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove the SSA name to avoid checking errors.
         All uses are dominated by the noreturn call and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to keep
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK (bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  /* Similarly remove VDEF if there is any.  */
  else if (gimple_vdef (stmt))
    update_stmt (stmt);
  return changed;
}
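
/* For example, when a call in the middle of a block is discovered to
   be noreturn,

     x_1 = foo ();        <-- foo () now known not to return
     y_2 = x_1 + 1;

   the block is split after the call, the fallthru edge together with
   the now unreachable second half is removed, and the dead LHS x_1 is
   dropped from the call.  (foo and the SSA names here are placeholder
   names.)  */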

/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (VEC_length (gimple, MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = VEC_pop (gimple, MODIFIED_NORETURN_CALLS (cfun));
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so verify first that
           BB is present in the cfg.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block
            || BASIC_BLOCK (bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}

/* Tries to clean up the CFG in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}

/* Iterate the cfg cleanups while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
     since the basic blocks may get removed.  */
  n = last_basic_block;
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK (i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bbs_on_noreturn_calls, in case we have altered any
         noreturn calls.  */
      retval |= split_bbs_on_noreturn_calls ();
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}


/* Remove unreachable blocks and do other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (changed_bbs, TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Clean up the CFG and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}
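
/* This is the entry point used by passes that have modified the CFG:
   they either call cleanup_tree_cfg () directly or request it through
   TODO_cleanup_cfg in their todo_flags_finish, which ends up calling
   this function.  */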

/* Merge the PHI nodes at BB into those at BB's sole successor.  */

static void
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_ELT (edge_var_map, head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

     goto <bb 9> (<L9>);

   <L8>:;
     tem_17 = foo ();

     # tem_6 = PHI <tem_17(8), tem_23(7)>;
   <L9>:;

     # tem_3 = PHI <tem_6(9), tem_2(5)>;
   <L10>:;

   Then we merge the first PHI node into the second one like so:

     goto <bb 9> (<L10>);

   <L8>:;
     tem_17 = foo ();

     # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
   <L10>:;  */

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  while (current != worklist)
    {
      bb = *--current;
      remove_forwarder_block_with_phi (bb);
    }

  free (worklist);
  return 0;
}

static bool
gate_merge_phi (void)
{
  return true;
}

struct gimple_opt_pass pass_merge_phi =
{
 {
  GIMPLE_PASS,
  "mergephi",			/* name */
  gate_merge_phi,		/* gate */
  merge_phi_nodes,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_MERGE_PHI,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect		/* todo_flags_finish */
  | TODO_verify_ssa
 }
};