]>
Commit | Line | Data |
---|---|---|
c9784e6d KH |
1 | /* CFG cleanup for trees. |
2 | Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. | |
3 | ||
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GCC; see the file COPYING. If not, write to | |
366ccddb KC |
18 | the Free Software Foundation, 51 Franklin Street, Fifth Floor, |
19 | Boston, MA 02110-1301, USA. */ | |
c9784e6d KH |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "rtl.h" | |
27 | #include "tm_p.h" | |
28 | #include "hard-reg-set.h" | |
29 | #include "basic-block.h" | |
30 | #include "output.h" | |
31 | #include "errors.h" | |
32 | #include "flags.h" | |
33 | #include "function.h" | |
34 | #include "expr.h" | |
35 | #include "ggc.h" | |
36 | #include "langhooks.h" | |
37 | #include "diagnostic.h" | |
38 | #include "tree-flow.h" | |
39 | #include "timevar.h" | |
40 | #include "tree-dump.h" | |
41 | #include "tree-pass.h" | |
42 | #include "toplev.h" | |
43 | #include "except.h" | |
44 | #include "cfgloop.h" | |
45 | #include "cfglayout.h" | |
46 | #include "hashtab.h" | |
47 | #include "tree-ssa-propagate.h" | |
17684618 | 48 | #include "tree-scalar-evolution.h" |
c9784e6d KH |
49 | |
50 | /* Remove any fallthru edge from EV. Return true if an edge was removed. */ | |
51 | ||
52 | static bool | |
53 | remove_fallthru_edge (VEC(edge,gc) *ev) | |
54 | { | |
55 | edge_iterator ei; | |
56 | edge e; | |
57 | ||
58 | FOR_EACH_EDGE (e, ei, ev) | |
59 | if ((e->flags & EDGE_FALLTHRU) != 0) | |
60 | { | |
61 | remove_edge (e); | |
62 | return true; | |
63 | } | |
64 | return false; | |
65 | } | |
66 | ||
/* Disconnect an unreachable block in the control expression starting
   at block BB.  BSI is an iterator positioned at BB's controlling
   statement, which must be a COND_EXPR or a SWITCH_EXPR (anything
   else hits gcc_unreachable below).  The statement is removed and the
   surviving edge is turned into a fallthru edge.  Returns true if any
   edge was removed.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;

      /* Fold the controlling expression; only a compile-time constant
	 lets us pick the taken edge.  */
      switch (TREE_CODE (expr))
	{
	case COND_EXPR:
	  val = fold (COND_EXPR_COND (expr));
	  break;

	case SWITCH_EXPR:
	  val = fold (SWITCH_COND (expr));
	  if (TREE_CODE (val) != INTEGER_CST)
	    return false;
	  break;

	default:
	  gcc_unreachable ();
	}

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	return false;

      /* Remove all the edges except the one that is always executed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      /* Fold the removed edge's profile into the surviving
		 edge.  Note: ei is deliberately NOT advanced here;
		 remove_edge shifts the successor vector under it.  */
	      taken_edge->probability += e->probability;
	      taken_edge->count += e->count;
	      remove_edge (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      /* Accumulated probability can overshoot the fixed-point base;
	 clamp it.  */
      if (taken_edge->probability > REG_BR_PROB_BASE)
	taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  /* The control statement is no longer needed; BB now falls through
     along TAKEN_EDGE.  */
  bsi_remove (&bsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}
129 | ||
/* A list of all the noreturn calls passed to modify_stmt.
   cleanup_control_flow uses it to detect cases where a mid-block
   indirect call has been turned into a noreturn call.  When this
   happens, all the instructions after the call are no longer
   reachable and must be deleted as dead.  Entries are popped and
   processed (the containing block is split after the call) at the
   start of cleanup_control_flow.  */

VEC(tree,gc) *modified_noreturn_calls;
/* Try to remove superfluous control structures: constant conditions,
   computed gotos with known destinations, and dead edges after calls
   that became noreturn.  Returns true if the CFG changed.  */

static bool
cleanup_control_flow (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool retval = false;
  tree stmt;

  /* Detect cases where a mid-block call is now known not to return.
     Split the block after such a call so the dead tail becomes a
     separate (unreachable) block.  */
  while (VEC_length (tree, modified_noreturn_calls))
    {
      stmt = VEC_pop (tree, modified_noreturn_calls);
      bb = bb_for_stmt (stmt);
      if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt))
	split_block (bb, stmt);
    }

  FOR_EACH_BB (bb)
    {
      bsi = bsi_last (bb);

      /* If the last statement of the block could throw and now cannot,
	 we need to prune cfg.  */
      tree_purge_dead_eh_edges (bb);

      if (bsi_end_p (bsi))
	continue;

      stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) == COND_EXPR
	  || TREE_CODE (stmt) == SWITCH_EXPR)
	retval |= cleanup_control_expr_graph (bb, bsi);
      /* If we had a computed goto which has a compile-time determinable
	 destination, then we can eliminate the goto.  */
      else if (TREE_CODE (stmt) == GOTO_EXPR
	       && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0))
		   == LABEL_DECL))
	{
	  edge e;
	  tree label;
	  edge_iterator ei;
	  basic_block target_block;
	  bool removed_edge = false;

	  /* First look at all the outgoing edges.  Delete any outgoing
	     edges which do not go to the right block.  For the one
	     edge which goes to the right block, fix up its flags.  */
	  label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
	  target_block = label_to_block (label);
	  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	    {
	      if (e->dest != target_block)
		{
		  /* ei is not advanced: remove_edge compacts the
		     successor vector in place.  */
		  removed_edge = true;
		  remove_edge (e);
		}
	      else
		{
		  /* Turn off the EDGE_ABNORMAL flag.  */
		  e->flags &= ~EDGE_ABNORMAL;

		  /* And set EDGE_FALLTHRU.  */
		  e->flags |= EDGE_FALLTHRU;
		  ei_next (&ei);
		}
	    }

	  /* If we removed one or more edges, then we will need to fix the
	     dominators.  It may be possible to incrementally update them.  */
	  if (removed_edge)
	    free_dominance_info (CDI_DOMINATORS);

	  /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
	     relevant information we need.  */
	  bsi_remove (&bsi, true);
	  retval = true;
	}

      /* Check for indirect calls that have been turned into
	 noreturn calls.  */
      else if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
	{
	  free_dominance_info (CDI_DOMINATORS);
	  retval = true;
	}
    }
  return retval;
}
230 | ||
231 | /* Return true if basic block BB does nothing except pass control | |
232 | flow to another block and that we can safely insert a label at | |
233 | the start of the successor block. | |
234 | ||
235 | As a precondition, we require that BB be not equal to | |
236 | ENTRY_BLOCK_PTR. */ | |
237 | ||
238 | static bool | |
239 | tree_forwarder_block_p (basic_block bb, bool phi_wanted) | |
240 | { | |
241 | block_stmt_iterator bsi; | |
242 | ||
243 | /* BB must have a single outgoing edge. */ | |
244 | if (single_succ_p (bb) != 1 | |
245 | /* If PHI_WANTED is false, BB must not have any PHI nodes. | |
246 | Otherwise, BB must have PHI nodes. */ | |
247 | || (phi_nodes (bb) != NULL_TREE) != phi_wanted | |
248 | /* BB may not be a predecessor of EXIT_BLOCK_PTR. */ | |
249 | || single_succ (bb) == EXIT_BLOCK_PTR | |
250 | /* Nor should this be an infinite loop. */ | |
251 | || single_succ (bb) == bb | |
252 | /* BB may not have an abnormal outgoing edge. */ | |
253 | || (single_succ_edge (bb)->flags & EDGE_ABNORMAL)) | |
254 | return false; | |
255 | ||
256 | #if ENABLE_CHECKING | |
257 | gcc_assert (bb != ENTRY_BLOCK_PTR); | |
258 | #endif | |
259 | ||
260 | /* Now walk through the statements backward. We can ignore labels, | |
261 | anything else means this is not a forwarder block. */ | |
262 | for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi)) | |
263 | { | |
264 | tree stmt = bsi_stmt (bsi); | |
265 | ||
266 | switch (TREE_CODE (stmt)) | |
267 | { | |
268 | case LABEL_EXPR: | |
269 | if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))) | |
270 | return false; | |
271 | break; | |
272 | ||
273 | default: | |
274 | return false; | |
275 | } | |
276 | } | |
277 | ||
278 | if (find_edge (ENTRY_BLOCK_PTR, bb)) | |
279 | return false; | |
280 | ||
281 | if (current_loops) | |
282 | { | |
283 | basic_block dest; | |
284 | /* Protect loop latches, headers and preheaders. */ | |
285 | if (bb->loop_father->header == bb) | |
286 | return false; | |
287 | dest = EDGE_SUCC (bb, 0)->dest; | |
288 | ||
289 | if (dest->loop_father->header == dest) | |
290 | return false; | |
291 | } | |
292 | ||
293 | return true; | |
294 | } | |
295 | ||
296 | /* Return true if BB has at least one abnormal incoming edge. */ | |
297 | ||
298 | static inline bool | |
299 | has_abnormal_incoming_edge_p (basic_block bb) | |
300 | { | |
301 | edge e; | |
302 | edge_iterator ei; | |
303 | ||
304 | FOR_EACH_EDGE (e, ei, bb->preds) | |
305 | if (e->flags & EDGE_ABNORMAL) | |
306 | return true; | |
307 | ||
308 | return false; | |
309 | } | |
310 | ||
311 | /* If all the PHI nodes in DEST have alternatives for E1 and E2 and | |
312 | those alternatives are equal in each of the PHI nodes, then return | |
313 | true, else return false. */ | |
314 | ||
315 | static bool | |
316 | phi_alternatives_equal (basic_block dest, edge e1, edge e2) | |
317 | { | |
318 | int n1 = e1->dest_idx; | |
319 | int n2 = e2->dest_idx; | |
320 | tree phi; | |
321 | ||
322 | for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi)) | |
323 | { | |
324 | tree val1 = PHI_ARG_DEF (phi, n1); | |
325 | tree val2 = PHI_ARG_DEF (phi, n2); | |
326 | ||
327 | gcc_assert (val1 != NULL_TREE); | |
328 | gcc_assert (val2 != NULL_TREE); | |
329 | ||
330 | if (!operand_equal_for_phi_arg_p (val1, val2)) | |
331 | return false; | |
332 | } | |
333 | ||
334 | return true; | |
335 | } | |
336 | ||
/* Removes forwarder block BB.  Returns false if this failed.  If a new
   forwarder block is created due to redirection of edges, it is
   stored to worklist (the caller's worklist cursor, advanced through
   the double pointer).  */

static bool
remove_forwarder_block (basic_block bb, basic_block **worklist)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  tree label;
  tree phi;
  edge_iterator ei;
  block_stmt_iterator bsi, bsi_to;
  bool seen_abnormal_edge = false;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not merge
     it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (has_abnormal_incoming_edge_p (bb))
    {
      seen_abnormal_edge = true;

      if (has_abnormal_incoming_edge_p (dest)
	  || phi_nodes (dest) != NULL_TREE)
	return false;
    }

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (phi_nodes (dest))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  s = find_edge (e->src, dest);
	  if (!s)
	    continue;

	  if (!phi_alternatives_equal (dest, succ, s))
	    return false;
	}
    }

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* If there is an abnormal edge, redirect it anyway, and
	     move the labels to the new block to make it legal.  */
	  s = redirect_edge_succ_nodup (e, dest);
	}
      else
	s = redirect_edge_and_branch (e, dest);

      if (s == e)
	{
	  /* Create arguments for the phi nodes, since the edge was not
	     here before.  */
	  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
	    add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
	}
      else
	{
	  /* The source basic block might become a forwarder.  We know
	     that it was not a forwarder before, since it used to have
	     at least two outgoing edges, so we may just add it to
	     worklist.  */
	  if (tree_forwarder_block_p (s->src, false))
	    *(*worklist)++ = s->src;
	}
    }

  if (seen_abnormal_edge)
    {
      /* Move the labels to the new block, so that the redirection of
	 the abnormal edges works.  BB is all labels here (guaranteed
	 by tree_forwarder_block_p), hence the assert below.  */

      bsi_to = bsi_start (dest);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  label = bsi_stmt (bsi);
	  gcc_assert (TREE_CODE (label) == LABEL_EXPR);
	  bsi_remove (&bsi, false);
	  bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
	}
    }

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
	{
	  /* Shortcut to avoid calling (relatively expensive)
	     nearest_common_dominator unless necessary.  */
	  dom = dombb;
	}
      else
	dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
471 | ||
472 | /* Removes forwarder blocks. */ | |
473 | ||
474 | static bool | |
475 | cleanup_forwarder_blocks (void) | |
476 | { | |
477 | basic_block bb; | |
478 | bool changed = false; | |
5ed6ace5 | 479 | basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks); |
c9784e6d KH |
480 | basic_block *current = worklist; |
481 | ||
482 | FOR_EACH_BB (bb) | |
483 | { | |
484 | if (tree_forwarder_block_p (bb, false)) | |
485 | *current++ = bb; | |
486 | } | |
487 | ||
488 | while (current != worklist) | |
489 | { | |
490 | bb = *--current; | |
491 | changed |= remove_forwarder_block (bb, ¤t); | |
492 | } | |
493 | ||
494 | free (worklist); | |
495 | return changed; | |
496 | } | |
497 | ||
/* Do one round of CFG cleanup: prune control flow, delete unreachable
   blocks, optionally remove forwarder blocks, and merge sequential
   blocks.  Returns true if anything changed.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval;

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();

  /* Forwarder blocks can carry line number information which is
     useful when debugging, so we only clean them up when
     optimizing.  */

  if (optimize > 0)
    {
      /* cleanup_forwarder_blocks can redirect edges out of
	 SWITCH_EXPRs, which can get expensive.  So we want to enable
	 recording of edge to CASE_LABEL_EXPR mappings around the call
	 to cleanup_forwarder_blocks.  */
      start_recording_case_labels ();
      retval |= cleanup_forwarder_blocks ();
      end_recording_case_labels ();
    }

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  retval |= merge_seq_blocks ();

  return retval;
}
530 | ||
531 | ||
e3594cb3 DN |
532 | /* Remove unreachable blocks and other miscellaneous clean up work. |
533 | Return true if the flowgraph was modified, false otherwise. */ | |
89e80dd4 DN |
534 | |
535 | bool | |
536 | cleanup_tree_cfg (void) | |
537 | { | |
e3594cb3 | 538 | bool retval, changed; |
89e80dd4 DN |
539 | |
540 | timevar_push (TV_TREE_CLEANUP_CFG); | |
541 | ||
e3594cb3 DN |
542 | /* Iterate until there are no more cleanups left to do. If any |
543 | iteration changed the flowgraph, set CHANGED to true. */ | |
544 | changed = false; | |
78234a86 | 545 | do |
e3594cb3 DN |
546 | { |
547 | retval = cleanup_tree_cfg_1 (); | |
548 | changed |= retval; | |
549 | } | |
78234a86 | 550 | while (retval); |
c9784e6d | 551 | |
c9784e6d KH |
552 | compact_blocks (); |
553 | ||
554 | #ifdef ENABLE_CHECKING | |
555 | verify_flow_info (); | |
556 | #endif | |
89e80dd4 | 557 | |
c9784e6d | 558 | timevar_pop (TV_TREE_CLEANUP_CFG); |
89e80dd4 | 559 | |
e3594cb3 | 560 | return changed; |
c9784e6d KH |
561 | } |
562 | ||
/* Cleanup cfg and repair loop structures.  If the cleanup changed
   nothing, the loop information is left untouched.  */

void
cleanup_tree_cfg_loop (void)
{
  bool changed = cleanup_tree_cfg ();

  if (changed)
    {
      bitmap changed_bbs = BITMAP_ALLOC (NULL);
      /* Repair the loop tree; blocks whose loop membership changed
	 are recorded in CHANGED_BBS.  */
      fix_loop_structure (current_loops, changed_bbs);
      calculate_dominance_info (CDI_DOMINATORS);

      /* This usually does nothing.  But sometimes parts of cfg that originally
	 were inside a loop get out of it due to edge removal (since they
	 become unreachable by back edges from latch).  */
      rewrite_into_loop_closed_ssa (changed_bbs, TODO_update_ssa);

      BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
      verify_loop_structure (current_loops);
#endif
      /* Scalar evolution caches refer to the old loop structure;
	 discard them.  */
      scev_reset ();
    }
}
589 | ||
/* Merge the PHI nodes at BB into those at BB's sole successor, then
   delete BB.  BB is expected to be a forwarder block that carries
   PHI nodes (see merge_phi_nodes).  */

static void
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  tree label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && TREE_CODE (label) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
    return;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      tree phi;

      s = find_edge (e->src, dest);
      if (s)
	{
	  /* We already have an edge S from E->src to DEST.  If S and
	     E->dest's sole successor edge have the same PHI arguments
	     at DEST, redirect S to DEST.  */
	  if (phi_alternatives_equal (dest, s, succ))
	    {
	      e = redirect_edge_and_branch (e, dest);
	      PENDING_STMT (e) = NULL_TREE;
	      continue;
	    }

	  /* PHI arguments are different.  Create a forwarder block by
	     splitting E so that we can merge PHI arguments on E to
	     DEST.  */
	  e = single_succ_edge (split_edge (e));
	}

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
	 destination of E.  */
      for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
	{
	  tree def = PHI_ARG_DEF (phi, succ->dest_idx);

	  if (TREE_CODE (def) == SSA_NAME)
	    {
	      tree var;

	      /* If DEF is one of the results of PHI nodes removed during
		 redirection, replace it with the PHI argument that used
		 to be on E.  PENDING_STMT (e) holds the old-arg/new-arg
		 pairs as a TREE_PURPOSE/TREE_VALUE list.  */
	      for (var = PENDING_STMT (e); var; var = TREE_CHAIN (var))
		{
		  tree old_arg = TREE_PURPOSE (var);
		  tree new_arg = TREE_VALUE (var);

		  if (def == old_arg)
		    {
		      def = new_arg;
		      break;
		    }
		}
	    }

	  add_phi_arg (phi, def, s);
	}

      PENDING_STMT (e) = NULL;
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
	 nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
}
694 | ||
/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
	continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
	 nodes.  */
      if (!phi_nodes (dest)
	  /* We don't want to deal with a basic block with
	     abnormal edges.  */
	  || has_abnormal_incoming_edge_p (bb))
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
	{
	  /* If BB does not dominate DEST, then the PHI nodes at
	     DEST must be the only users of the results of the PHI
	     nodes at BB.  */
	  *current++ = bb;
	}
      else
	{
	  tree phi;
	  unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

	  /* BB dominates DEST.  There may be many users of the PHI
	     nodes in BB.  However, there is still a trivial case we
	     can handle.  If the result of every PHI in BB is used
	     only by a PHI in DEST, then we can trivially merge the
	     PHI nodes from BB into DEST.  */
	  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	    {
	      tree result = PHI_RESULT (phi);
	      use_operand_p imm_use;
	      tree use_stmt;

	      /* If the PHI's result is never used, then we can just
		 ignore it.  */
	      if (has_zero_uses (result))
		continue;

	      /* Get the single use of the result of this PHI node.
		 It must be a PHI in DEST receiving RESULT on the
		 edge from BB; otherwise bail out of the scan.  */
	      if (!single_imm_use (result, &imm_use, &use_stmt)
		  || TREE_CODE (use_stmt) != PHI_NODE
		  || bb_for_stmt (use_stmt) != dest
		  || PHI_ARG_DEF (use_stmt, dest_idx) != result)
		break;
	    }

	  /* If the loop above iterated through all the PHI nodes
	     in BB, then we can merge the PHIs from BB into DEST.  */
	  if (!phi)
	    *current++ = bb;
	}
    }

  /* Now let's drain WORKLIST.  */
  while (current != worklist)
    {
      bb = *--current;
      remove_forwarder_block_with_phi (bb);
    }

  free (worklist);
  return 0;
}
801 | ||
/* Gate function for the PHI-merging pass: the pass is always
   enabled.  (Was "return 1;" — use the bool literal for a function
   declared bool.)  */

static bool
gate_merge_phi (void)
{
  return true;
}
807 | ||
/* Pass descriptor for the PHI-merging pass ("mergephi").  It requires
   a CFG in SSA form (PROP_cfg | PROP_ssa) and requests SSA
   verification, dumping and GC collection when it finishes.  */

struct tree_opt_pass pass_merge_phi = {
  "mergephi",			/* name */
  gate_merge_phi,		/* gate */
  merge_phi_nodes,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_MERGE_PHI,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect	/* todo_flags_finish */
    | TODO_verify_ssa,
  0				/* letter */
};