/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" are those with
   no such impact; "necessary statements" are those that may affect
   the output.
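
   For example, in a function such as

     int
     foo (int a, int b)
     {
       int t = a * b;   <- dead: T is never used and the multiply has
                           no side effects
       return a + b;    <- necessary: it produces the function's result
     }

   the first statement can be deleted without changing the program's
   output.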

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
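
/* In this file, phase 1 roughly corresponds to
   find_obviously_necessary_stmts, phase 2 to propagate_necessity (plus
   mark_really_necessary_kill_operand_phis for virtual operands), and
   phase 3 to eliminate_unnecessary_stmts; perform_tree_ssa_dce drives
   them in that order.  */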

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"

#include "tree.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
\f
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

static varray_type worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
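
/* As an illustration: given "if (p) A (); B ();", the block containing the
   call to A is control dependent on the edge taken when P is true, while
   the block containing B post-dominates the test and so is not control
   dependent on either edge out of the condition.  */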
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* Execute CODE for each edge (given number EDGE_NUMBER within the CODE)
   for which the block with index N is control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(N, EDGE_NUMBER, CODE)                   \
  {                                                                          \
    bitmap_iterator bi;                                                      \
                                                                             \
    EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[N], 0, EDGE_NUMBER, bi) \
      {                                                                      \
        CODE;                                                                \
      }                                                                      \
  }
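
/* A minimal usage sketch, mirroring mark_control_dependent_edges_necessary
   below: iterate over every edge that block BB is control dependent on.

     unsigned edge_number;

     EXECUTE_IF_CONTROL_DEPENDENT (bb->index, edge_number,
       {
         basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
         ...
       });
 */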

/* Local function prototypes.  */
static inline void set_control_dependence_map_bit (basic_block, int);
static inline void clear_control_dependence_bitmap (basic_block);
static void find_all_control_dependences (struct edge_list *);
static void find_control_dependence (struct edge_list *, int);
static inline basic_block find_pdom (basic_block);

static inline void mark_stmt_necessary (tree, bool);
static inline void mark_operand_necessary (tree, bool);

static void mark_stmt_if_obviously_necessary (tree, bool);
static void find_obviously_necessary_stmts (struct edge_list *);

static void mark_control_dependent_edges_necessary (basic_block, struct edge_list *);
static void propagate_necessity (struct edge_list *);

static void eliminate_unnecessary_stmts (void);
static void remove_dead_phis (basic_block);
static void remove_dead_stmt (block_stmt_iterator *, basic_block);

static void print_stats (void);
static void tree_dce_init (bool);
static void tree_dce_done (bool);
\f
/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline
void clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = ENTRY_BLOCK_PTR->next_bb;
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
         dependent because instructions that throw are always necessary
         anyway.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because the special ENTRY and EXIT blocks
   (which have negative index numbers) must be handled separately.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
        return EXIT_BLOCK_PTR;
      return bb;
    }
}
\f
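/* Whether a statement has already been found necessary.  The bit lives in
   the statement's asm_written_flag, which this pass reuses as scratch
   space; find_obviously_necessary_stmts clears it on every statement and
   PHI node before marking begins.  */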
#define NECESSARY(stmt)   stmt->common.asm_written_flag

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static inline void
mark_stmt_necessary (tree stmt, bool add_to_worklist)
{
  gcc_assert (stmt);
  gcc_assert (stmt != error_mark_node);
  gcc_assert (!DECL_P (stmt));

  if (NECESSARY (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  NECESSARY (stmt) = 1;
  if (add_to_worklist)
    VARRAY_PUSH_TREE (worklist, stmt);
}

/* Mark the statement defining operand OP as necessary.  PHIONLY is true
   if we should only mark it necessary if it is a phi node.  */

static inline void
mark_operand_necessary (tree op, bool phionly)
{
  tree stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    return;
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (NECESSARY (stmt)
      || IS_EMPTY_STMT (stmt)
      || (phionly && TREE_CODE (stmt) != PHI_NODE))
    return;

  NECESSARY (stmt) = 1;
  VARRAY_PUSH_TREE (worklist, stmt);
}
\f

/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
{
  stmt_ann_t ann;
  tree op, def;
  ssa_op_iter iter;

  /* Statements that are implicitly live.  Most function calls, asm and return
     statements are required.  Labels and BIND_EXPR nodes are kept because
     they are control flow, and we have no way of knowing whether they can be
     removed.  DCE can eliminate all the other statements in a block, and CFG
     cleanup can then remove the block and the labels.  */
  switch (TREE_CODE (stmt))
    {
    case BIND_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
      mark_stmt_necessary (stmt, false);
      return;

    case ASM_EXPR:
    case RESX_EXPR:
    case RETURN_EXPR:
      mark_stmt_necessary (stmt, true);
      return;

    case CALL_EXPR:
      /* Most, but not all, function calls are required.  Function calls that
         produce no result and have no side effects (i.e. calls to const or
         pure functions) are unnecessary.  */
      if (TREE_SIDE_EFFECTS (stmt))
        mark_stmt_necessary (stmt, true);
      return;

    case MODIFY_EXPR:
      op = get_call_expr_in (stmt);
      if (op && TREE_SIDE_EFFECTS (op))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }

      /* These values are mildly magic bits of the EH runtime.  We can't
         see the entire lifetime of these values until landing pads are
         generated.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == EXC_PTR_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == FILTER_EXPR)
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
      break;

    case GOTO_EXPR:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case COND_EXPR:
      gcc_assert (EDGE_COUNT (bb_for_stmt (stmt)->succs) == 2);
      /* Fall through.  */

    case SWITCH_EXPR:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (ann->has_volatile_ops || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    {
      if (is_global_var (SSA_NAME_VAR (def)))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
    }
  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
\f
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  block_stmt_iterator i;
  edge e;

  FOR_EACH_BB (bb)
    {
      tree phi;

      /* Check any PHI nodes in the block.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          NECESSARY (phi) = 0;

          /* PHIs for virtual variables do not directly affect code
             generation and need not be considered inherently necessary
             regardless of the bits set in their decl.

             Thus, we only need to mark PHIs for real variables which
             need their result preserved as being inherently necessary.  */
          if (is_gimple_reg (PHI_RESULT (phi))
              && is_global_var (SSA_NAME_VAR (PHI_RESULT (phi))))
            mark_stmt_necessary (phi, true);
        }

      /* Check all statements in the block.  */
      for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          NECESSARY (stmt) = 0;
          mark_stmt_if_obviously_necessary (stmt, el != NULL);
        }
    }

  if (el)
    {
      /* Prevent the loops from being removed.  We must keep the infinite loops,
         and we currently do not have a means to recognize the finite ones.  */
      FOR_EACH_BB (bb)
        {
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_DFS_BACK)
              mark_control_dependent_edges_necessary (e->dest, el);
        }
    }
}
\f
/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.  */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
  unsigned edge_number;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bb->index, edge_number,
    {
      tree t;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
        continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);

      t = last_stmt (cd_bb);
      if (t && is_ctrl_stmt (t))
        mark_stmt_necessary (t, true);
    });
}
\f
/* Propagate necessity using the operands of necessary statements.  Process
   the uses on each statement in the worklist, and add all feeding statements
   which contribute to the calculation of this value to the worklist.
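
   For example, given

     x_1 = a_2 + b_3;
     return x_1;

   the return statement is obviously necessary, so processing it marks the
   statement defining x_1 as necessary, and processing that statement in
   turn marks the definitions of a_2 and b_3.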

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  tree i;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VARRAY_ACTIVE_SIZE (worklist) > 0)
    {
      /* Take `i' from worklist.  */
      i = VARRAY_TOP_TREE (worklist);
      VARRAY_POP (worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_generic_stmt (dump_file, i, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statements of the basic blocks that the block
             containing `i' is control dependent on, but only if we haven't
             already done so.  */
          basic_block bb = bb_for_stmt (i);
          if (bb != ENTRY_BLOCK_PTR
              && ! TEST_BIT (visited_control_parents, bb->index))
            {
              SET_BIT (visited_control_parents, bb->index);
              mark_control_dependent_edges_necessary (bb, el);
            }
        }

      if (TREE_CODE (i) == PHI_NODE)
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          int k;
          for (k = 0; k < PHI_NUM_ARGS (i); k++)
            {
              tree arg = PHI_ARG_DEF (i, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg, false);
            }

          if (aggressive)
            {
              for (k = 0; k < PHI_NUM_ARGS (i); k++)
                {
                  basic_block arg_bb = PHI_ARG_EDGE (i, k)->src;
                  if (arg_bb != ENTRY_BLOCK_PTR
                      && ! TEST_BIT (visited_control_parents, arg_bb->index))
                    {
                      SET_BIT (visited_control_parents, arg_bb->index);
                      mark_control_dependent_edges_necessary (arg_bb, el);
                    }
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             V_MAY_DEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          get_stmt_operands (i);

          /* The operands of V_MAY_DEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (V_MAY_DEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, i, iter, SSA_OP_ALL_USES)
            mark_operand_necessary (use, false);
        }
    }
}


/* Propagate necessity around virtual phi nodes used in kill operands.
   The reason this isn't done during propagate_necessity is that we don't
   want to keep phis around that are just there for must-defs, unless we
   absolutely have to.  After we've rewritten the reaching definitions to be
   correct in the previous part of the fixup routine, we can simply propagate
   around the information about which of these virtual phi nodes are really
   used, and set the NECESSARY flag accordingly.
   Note that we do the minimum here to ensure that we keep alive the phis that
   are actually used in the corrected SSA form.  In particular, some of these
   phis may now have all of their operands the same, and will be deleted by
   some other pass.  */

static void
mark_really_necessary_kill_operand_phis (void)
{
  basic_block bb;
  int i;

  /* Seed the worklist with the new virtual phi arguments and virtual
     uses.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
            {
              for (i = 0; i < PHI_NUM_ARGS (phi); i++)
                mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
            }
        }

      for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
        {
          tree stmt = bsi_stmt (bsi);

          if (NECESSARY (stmt))
            {
              use_operand_p use_p;
              ssa_op_iter iter;
              FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
                                        SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
                {
                  tree use = USE_FROM_PTR (use_p);
                  mark_operand_necessary (use, true);
                }
            }
        }
    }

  /* Mark all virtual phis still in use as necessary, and all of their
     arguments that are phis as necessary.  */
  while (VARRAY_ACTIVE_SIZE (worklist) > 0)
    {
      tree use = VARRAY_TOP_TREE (worklist);
      VARRAY_POP (worklist);

      for (i = 0; i < PHI_NUM_ARGS (use); i++)
        mark_operand_necessary (PHI_ARG_DEF (use, i), true);
    }
}


\f

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static void
eliminate_unnecessary_stmts (void)
{
  basic_block bb;
  block_stmt_iterator i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      remove_dead_phis (bb);

      /* Remove dead statements.  */
      for (i = bsi_start (bb); ! bsi_end_p (i) ; )
        {
          tree t = bsi_stmt (i);

          stats.total++;

          /* If `i' is not necessary then remove it.  */
          if (! NECESSARY (t))
            remove_dead_stmt (&i, bb);
          else
            {
              tree call = get_call_expr_in (t);
              if (call)
                notice_special_calls (call);
              bsi_next (&i);
            }
        }
    }
}
\f
/* Remove dead PHI nodes from block BB.  */

static void
remove_dead_phis (basic_block bb)
{
  tree prev, phi;

  prev = NULL_TREE;
  phi = phi_nodes (bb);
  while (phi)
    {
      stats.total_phis++;

      if (! NECESSARY (phi))
        {
          tree next = PHI_CHAIN (phi);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_generic_stmt (dump_file, phi, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (phi, prev);
          stats.removed_phis++;
          phi = next;
        }
      else
        {
          prev = phi;
          phi = PHI_CHAIN (phi);
        }
    }
}
\f
/* Remove the dead statement pointed to by iterator I.  Receives the basic
   block BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
  tree t = bsi_stmt (*i);
  def_operand_p def_p;

  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_generic_stmt (dump_file, t, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_cfg if this change in the flow graph makes them
     unreachable.  */
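  /* For instance, if neither arm of "if (cond) A (); else B ();" computes
     anything necessary, the COND_EXPR is dead: the block ending in it is
     rewired to fall through to the join block that post-dominates it, and
     the arms, now unreachable, are deleted later by cleanup_cfg.  */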
  if (is_ctrl_stmt (t))
    {
      basic_block post_dom_bb;
      /* The post dominance info has to be up-to-date.  */
      gcc_assert (dom_computed[CDI_POST_DOMINATORS] == DOM_OK);
      /* Get the immediate post dominator of bb.  */
      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
      /* Some blocks don't have an immediate post dominator.  This can happen
         for example with infinite loops.  Removing an infinite loop is an
         inappropriate transformation anyway...  */
      if (! post_dom_bb)
        {
          bsi_next (i);
          return;
        }

      /* Redirect the first edge out of BB to reach POST_DOM_BB.  */
      redirect_edge_and_branch (EDGE_SUCC (bb, 0), post_dom_bb);
      PENDING_STMT (EDGE_SUCC (bb, 0)) = NULL;
      EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
      EDGE_SUCC (bb, 0)->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      EDGE_SUCC (bb, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* If the edge reaches any block other than the exit, then it is a
         fallthru edge; if it reaches the exit, then it is not a fallthru
         edge.  */
      if (post_dom_bb != EXIT_BLOCK_PTR)
        EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
      else
        EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      while (!single_succ_p (bb))
        remove_edge (EDGE_SUCC (bb, 1));
    }

  FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter,
                            SSA_OP_VIRTUAL_DEFS | SSA_OP_VIRTUAL_KILLS)
    {
      tree def = DEF_FROM_PTR (def_p);
      bitmap_set_bit (vars_to_rename,
                      var_ann (SSA_NAME_VAR (def))->uid);
    }
  bsi_remove (i);
  release_defs (t);
}
\f
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    {
      float percg;

      percg = ((float) stats.removed / (float) stats.total) * 100;
      fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
               stats.removed, stats.total, (int) percg);

      if (stats.total_phis == 0)
        percg = 0;
      else
        percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

      fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
               stats.removed_phis, stats.total_phis, (int) percg);
    }
}
\f
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map
        = xmalloc (last_basic_block * sizeof (bitmap));
      for (i = 0; i < last_basic_block; ++i)
        control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  VARRAY_TREE_INIT (worklist, 64, "work list");
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
        BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
    }

  sbitmap_free (processed);
}
\f
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static void
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  propagate_necessity (el);

  mark_really_necessary_kill_operand_phis ();
  eliminate_unnecessary_stmts ();

  if (aggressive)
    free_dominance_info (CDI_POST_DOMINATORS);

  /* Debugging dumps.  */
  if (dump_file)
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);
}

/* Pass entry points.  */
static void
tree_ssa_dce (void)
{
  perform_tree_ssa_dce (/*aggressive=*/false);
}

static void
tree_ssa_cd_dce (void)
{
  perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct tree_opt_pass pass_dce =
{
  "dce",                                /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg
    | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
  0                                     /* letter */
};

struct tree_opt_pass pass_cd_dce =
{
  "cddce",                              /* name */
  gate_dce,                             /* gate */
  tree_ssa_cd_dce,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CD_DCE,                       /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg
    | TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow,
                                        /* todo_flags_finish */
  0                                     /* letter */
};