/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have an
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
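
/* As a small illustration (an example added here for exposition, not
   part of the original comments): in

       int
       foo (int a)
       {
         int x = a * 2;     <-- dead: x is never used afterwards
         int y = a + 1;     <-- necessary: feeds the return value
         return y;
       }

   the statement defining 'x' contributes nothing to the program's
   output and is removed, while the statement defining 'y' is kept
   because the (inherently necessary) return statement uses it.  */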

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"

#include "tree.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
\f
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

static varray_type worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last_stmt of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
bitmap *control_dependence_map;
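
/* For example (an illustration added for exposition, not from the
   original comments): in

       if (p)           <-- block A, with out-edges A->B and A->C
         x = 1;         <-- block B
       y = 2;           <-- block C, the join point

   block B is control dependent on the edge A->B: taking that edge
   guarantees that B executes, while taking A->C guarantees that it
   does not.  Block C post-dominates A, so it is not control dependent
   on either edge out of A.  */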

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
sbitmap visited_control_parents;

/* Execute CODE for each edge (given number EDGE_NUMBER within the CODE)
   on which the block with index N is control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(N, EDGE_NUMBER, CODE) \
  { \
    bitmap_iterator bi; \
    \
    EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[N], 0, EDGE_NUMBER, bi) \
      { \
	CODE; \
      } \
  }

/* Local function prototypes.  */
static inline void set_control_dependence_map_bit (basic_block, int);
static inline void clear_control_dependence_bitmap (basic_block);
static void find_all_control_dependences (struct edge_list *);
static void find_control_dependence (struct edge_list *, int);
static inline basic_block find_pdom (basic_block);

static inline void mark_stmt_necessary (tree, bool);
static inline void mark_operand_necessary (tree, bool);

static void mark_stmt_if_obviously_necessary (tree, bool);
static void find_obviously_necessary_stmts (struct edge_list *);

static void mark_control_dependent_edges_necessary (basic_block, struct edge_list *);
static void propagate_necessity (struct edge_list *);

static void eliminate_unnecessary_stmts (void);
static void remove_dead_phis (basic_block);
static void remove_dead_stmt (block_stmt_iterator *, basic_block);

static void print_stats (void);
static void tree_dce_init (bool);
static void tree_dce_done (bool);
\f
/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline
void clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}

/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */
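/* (Descriptive note added here.)  Starting from the edge's destination
   block, the loop below walks up the post-dominator tree and marks each
   block it visits as control dependent on the edge, stopping at the
   post-dominator of the edge's source block (or at the first real block
   when the edge leaves the entry block).  That stopping block is reached
   no matter which out-edge is taken, so it is not control dependent on
   the edge.  */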

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = ENTRY_BLOCK_PTR->next_bb;
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
         dependent because instructions that throw are always necessary
         anyway.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}

/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
        return EXIT_BLOCK_PTR;
      return bb;
    }
}
\f
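/* Statement necessity is recorded directly on the statement tree.  The
   asm_written_flag bit in the tree's common header is (presumably) free
   for GIMPLE statements at this point in compilation, so this pass
   borrows it as its "necessary" mark rather than allocating a separate
   bit per statement.  */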
#define NECESSARY(stmt)	stmt->common.asm_written_flag

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static inline void
mark_stmt_necessary (tree stmt, bool add_to_worklist)
{
  gcc_assert (stmt);
  gcc_assert (stmt != error_mark_node);
  gcc_assert (!DECL_P (stmt));

  if (NECESSARY (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  NECESSARY (stmt) = 1;
  if (add_to_worklist)
    VARRAY_PUSH_TREE (worklist, stmt);
}

/* Mark the statement defining operand OP as necessary.  PHIONLY is true
   if we should only mark it necessary if it is a phi node.  */

static inline void
mark_operand_necessary (tree op, bool phionly)
{
  tree stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    return;
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (NECESSARY (stmt)
      || IS_EMPTY_STMT (stmt)
      || (phionly && TREE_CODE (stmt) != PHI_NODE))
    return;

  NECESSARY (stmt) = 1;
  VARRAY_PUSH_TREE (worklist, stmt);
}
\f

/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
{
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  stmt_ann_t ann;
  tree op, def;
  ssa_op_iter iter;

  /* Statements that are implicitly live.  Most function calls, asm and return
     statements are required.  Labels and BIND_EXPR nodes are kept because
     they are control flow, and we have no way of knowing whether they can be
     removed.  DCE can eliminate all the other statements in a block, and CFG
     can then remove the block and labels.  */
  switch (TREE_CODE (stmt))
    {
    case BIND_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
      mark_stmt_necessary (stmt, false);
      return;

    case ASM_EXPR:
    case RESX_EXPR:
    case RETURN_EXPR:
      mark_stmt_necessary (stmt, true);
      return;

    case CALL_EXPR:
      /* Most, but not all function calls are required.  Function calls that
         produce no result and have no side effects (i.e. const and pure
         functions) are unnecessary.  */
      if (TREE_SIDE_EFFECTS (stmt))
        mark_stmt_necessary (stmt, true);
      return;

    case MODIFY_EXPR:
      op = get_call_expr_in (stmt);
      if (op && TREE_SIDE_EFFECTS (op))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }

      /* These values are mildly magic bits of the EH runtime.  We can't
         see the entire lifetime of these values until landing pads are
         generated.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == EXC_PTR_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == FILTER_EXPR)
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
      break;

    case GOTO_EXPR:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case COND_EXPR:
      gcc_assert (EDGE_COUNT (bb_for_stmt (stmt)->succs) == 2);
      /* Fall through.  */

    case SWITCH_EXPR:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (ann->has_volatile_ops || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    {
      if (is_global_var (SSA_NAME_VAR (def)))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
    }

  /* Check virtual definitions.  If we get here, the only virtual
     definitions we should see are those generated by assignment
     statements.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  v_must_defs = V_MUST_DEF_OPS (ann);
  if (NUM_V_MAY_DEFS (v_may_defs) > 0 || NUM_V_MUST_DEFS (v_must_defs) > 0)
    {
      tree lhs;

      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      /* Note that we must not check the individual virtual operands
         here.  In particular, if this is an aliased store, we could
         end up with something like the following (SSA notation
         redacted for brevity):

             foo (int *p, int i)
             {
               int x;
               p_1 = (i_2 > 3) ? &x : p_1;

               # x_4 = V_MAY_DEF <x_3>
               *p_1 = 5;

               return 2;
             }

         Notice that the store to '*p_1' should be preserved; if we
         were to check the virtual definitions in that store, we would
         not mark it needed.  This is because 'x' is not a global
         variable.

         Therefore, we check the base address of the LHS.  If the
         address is a pointer, we check if its name tag or type tag is
         a global variable.  Otherwise, we check if the base variable
         is a global.  */
      lhs = TREE_OPERAND (stmt, 0);
      if (REFERENCE_CLASS_P (lhs))
        lhs = get_base_address (lhs);

      if (lhs == NULL_TREE)
        {
          /* If LHS is NULL, it means that we couldn't get the base
             address of the reference, in which case we should not
             remove this store.  */
          mark_stmt_necessary (stmt, true);
        }
      else if (DECL_P (lhs))
        {
          /* If the store is to a global symbol, we need to keep it.  */
          if (is_global_var (lhs))
            mark_stmt_necessary (stmt, true);
        }
      else if (INDIRECT_REF_P (lhs))
        {
          tree ptr = TREE_OPERAND (lhs, 0);
          struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
          tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
          tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;

          /* If either the name tag or the type tag for PTR is a
             global variable, then the store is necessary.  */
          if ((nmt && is_global_var (nmt))
              || (tmt && is_global_var (tmt)))
            {
              mark_stmt_necessary (stmt, true);
              return;
            }
        }
      else
        gcc_unreachable ();
    }

  return;
}
\f
/* Find obviously necessary statements.  These are things like most
   function calls and stores to file-level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  block_stmt_iterator i;
  edge e;

  FOR_EACH_BB (bb)
    {
      tree phi;

      /* Check any PHI nodes in the block.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          NECESSARY (phi) = 0;

          /* PHIs for virtual variables do not directly affect code
             generation and need not be considered inherently necessary
             regardless of the bits set in their decl.

             Thus, we only need to mark PHIs for real variables which
             need their result preserved as being inherently necessary.  */
          if (is_gimple_reg (PHI_RESULT (phi))
              && is_global_var (SSA_NAME_VAR (PHI_RESULT (phi))))
            mark_stmt_necessary (phi, true);
        }

      /* Check all statements in the block.  */
      for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          NECESSARY (stmt) = 0;
          mark_stmt_if_obviously_necessary (stmt, el != NULL);
        }
    }

  if (el)
    {
      /* Prevent the loops from being removed.  We must keep the infinite loops,
         and we currently do not have a means to recognize the finite ones.  */
      FOR_EACH_BB (bb)
        {
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_DFS_BACK)
              mark_control_dependent_edges_necessary (e->dest, el);
        }
    }
}
\f
/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.  */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
  unsigned edge_number;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bb->index, edge_number,
    {
      tree t;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
        continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);

      t = last_stmt (cd_bb);
      if (t && is_ctrl_stmt (t))
        mark_stmt_necessary (t, true);
    });
}
\f
/* Propagate necessity using the operands of necessary statements.  Process
   the uses on each statement in the worklist, and add all feeding statements
   which contribute to the calculation of this value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  tree i;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VARRAY_ACTIVE_SIZE (worklist) > 0)
    {
      /* Take `i' from worklist.  */
      i = VARRAY_TOP_TREE (worklist);
      VARRAY_POP (worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_generic_stmt (dump_file, i, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statements of the basic blocks that the block
             containing `i' is control dependent on, but only if we haven't
             already done so.  */
          basic_block bb = bb_for_stmt (i);
          if (bb != ENTRY_BLOCK_PTR
              && ! TEST_BIT (visited_control_parents, bb->index))
            {
              SET_BIT (visited_control_parents, bb->index);
              mark_control_dependent_edges_necessary (bb, el);
            }
        }

      if (TREE_CODE (i) == PHI_NODE)
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          int k;
          for (k = 0; k < PHI_NUM_ARGS (i); k++)
            {
              tree arg = PHI_ARG_DEF (i, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg, false);
            }

          if (aggressive)
            {
              for (k = 0; k < PHI_NUM_ARGS (i); k++)
                {
                  basic_block arg_bb = PHI_ARG_EDGE (i, k)->src;
                  if (arg_bb != ENTRY_BLOCK_PTR
                      && ! TEST_BIT (visited_control_parents, arg_bb->index))
                    {
                      SET_BIT (visited_control_parents, arg_bb->index);
                      mark_control_dependent_edges_necessary (arg_bb, el);
                    }
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             V_MAY_DEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          get_stmt_operands (i);

          /* The operands of V_MAY_DEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (V_MAY_DEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, i, iter, SSA_OP_ALL_USES)
            mark_operand_necessary (use, false);
        }
    }
}


/* Propagate necessity around virtual phi nodes used in kill operands.
   The reason this isn't done during propagate_necessity is because we don't
   want to keep phis around that are just there for must-defs, unless we
   absolutely have to.  After we've rewritten the reaching definitions to be
   correct in the previous part of the fixup routine, we can simply propagate
   around the information about which of these virtual phi nodes are really
   used, and set the NECESSARY flag accordingly.
   Note that we do the minimum here to ensure that we keep alive the phis that
   are actually used in the corrected SSA form.  In particular, some of these
   phis may now have all of their arguments the same, and will be deleted by
   some other pass.  */

static void
mark_really_necessary_kill_operand_phis (void)
{
  basic_block bb;
  int i;

  /* Seed the worklist with the new virtual phi arguments and virtual
     uses.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
            {
              for (i = 0; i < PHI_NUM_ARGS (phi); i++)
                mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
            }
        }

      for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
        {
          tree stmt = bsi_stmt (bsi);

          if (NECESSARY (stmt))
            {
              use_operand_p use_p;
              ssa_op_iter iter;
              FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
                                        SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
                {
                  tree use = USE_FROM_PTR (use_p);
                  mark_operand_necessary (use, true);
                }
            }
        }
    }

  /* Mark all virtual phis still in use as necessary, and all of their
     arguments that are phis as necessary.  */
  while (VARRAY_ACTIVE_SIZE (worklist) > 0)
    {
      tree use = VARRAY_TOP_TREE (worklist);
      VARRAY_POP (worklist);

      for (i = 0; i < PHI_NUM_ARGS (use); i++)
        mark_operand_necessary (PHI_ARG_DEF (use, i), true);
    }
}


\f

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static void
eliminate_unnecessary_stmts (void)
{
  basic_block bb;
  block_stmt_iterator i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      remove_dead_phis (bb);

      /* Remove dead statements.  */
      for (i = bsi_start (bb); ! bsi_end_p (i) ; )
        {
          tree t = bsi_stmt (i);

          stats.total++;

          /* If `i' is not necessary then remove it.  */
          if (! NECESSARY (t))
            remove_dead_stmt (&i, bb);
          else
            {
              tree call = get_call_expr_in (t);
              if (call)
                notice_special_calls (call);
              bsi_next (&i);
            }
        }
    }
}
\f
/* Remove dead PHI nodes from block BB.  */

static void
remove_dead_phis (basic_block bb)
{
  tree prev, phi;

  prev = NULL_TREE;
  phi = phi_nodes (bb);
  while (phi)
    {
      stats.total_phis++;

      if (! NECESSARY (phi))
        {
          tree next = PHI_CHAIN (phi);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_generic_stmt (dump_file, phi, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (phi, prev, bb);
          stats.removed_phis++;
          phi = next;
        }
      else
        {
          prev = phi;
          phi = PHI_CHAIN (phi);
        }
    }
}
\f
/* Remove the dead statement pointed to by iterator I.  Receives the basic
   block BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
  tree t = bsi_stmt (*i);
  def_operand_p def_p;

  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_generic_stmt (dump_file, t, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (t))
    {
      basic_block post_dom_bb;
      /* The post dominance info has to be up-to-date.  */
      gcc_assert (dom_computed[CDI_POST_DOMINATORS] == DOM_OK);
      /* Get the immediate post dominator of bb.  */
      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
      /* Some blocks don't have an immediate post dominator.  This can happen
         for example with infinite loops.  Removing an infinite loop is an
         inappropriate transformation anyway...  */
      if (! post_dom_bb)
        {
          bsi_next (i);
          return;
        }

      /* Redirect the first edge out of BB to reach POST_DOM_BB.  */
      redirect_edge_and_branch (EDGE_SUCC (bb, 0), post_dom_bb);
      PENDING_STMT (EDGE_SUCC (bb, 0)) = NULL;
      EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
      EDGE_SUCC (bb, 0)->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      EDGE_SUCC (bb, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* If the edge reaches any block other than the exit, then it is a
         fallthru edge; if it reaches the exit, then it is not a fallthru
         edge.  */
      if (post_dom_bb != EXIT_BLOCK_PTR)
        EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
      else
        EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      while (EDGE_COUNT (bb->succs) != 1)
        remove_edge (EDGE_SUCC (bb, 1));
    }

  FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter,
                            SSA_OP_VIRTUAL_DEFS | SSA_OP_VIRTUAL_KILLS)
    {
      tree def = DEF_FROM_PTR (def_p);
      bitmap_set_bit (vars_to_rename,
                      var_ann (SSA_NAME_VAR (def))->uid);
    }
  bsi_remove (i);
  release_defs (t);
}
\f
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    {
      float percg;

      percg = ((float) stats.removed / (float) stats.total) * 100;
      fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
               stats.removed, stats.total, (int) percg);

      if (stats.total_phis == 0)
        percg = 0;
      else
        percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

      fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
               stats.removed_phis, stats.total_phis, (int) percg);
    }
}
\f
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map
        = xmalloc (last_basic_block * sizeof (bitmap));
      for (i = 0; i < last_basic_block; ++i)
        control_dependence_map[i] = BITMAP_XMALLOC ();

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  VARRAY_TREE_INIT (worklist, 64, "work list");
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
        BITMAP_XFREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
    }

  sbitmap_free (processed);
}
\f
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static void
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  propagate_necessity (el);

  mark_really_necessary_kill_operand_phis ();
  eliminate_unnecessary_stmts ();

  if (aggressive)
    free_dominance_info (CDI_POST_DOMINATORS);

  /* Debugging dumps.  */
  if (dump_file)
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);
}
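
/* To illustrate the difference between the two modes (an example added
   for exposition, not part of the original comments): given

       if (a > b)       <-- no necessary statement depends on this branch
         c = 1;         <-- dead: c is never used afterwards
       return 0;

   conservative DCE keeps the conditional, since all control statements
   are marked necessary up front, while aggressive (control-dependent)
   DCE removes both the assignment and the branch, redirecting the block
   to fall through to its immediate post-dominator.  */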

/* Pass entry points.  */
static void
tree_ssa_dce (void)
{
  perform_tree_ssa_dce (/*aggressive=*/false);
}

static void
tree_ssa_cd_dce (void)
{
  perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct tree_opt_pass pass_dce =
{
  "dce",                                /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg
    | TODO_ggc_collect | TODO_verify_ssa,  /* todo_flags_finish */
  0                                     /* letter */
};

struct tree_opt_pass pass_cd_dce =
{
  "cddce",                              /* name */
  gate_dce,                             /* gate */
  tree_ssa_cd_dce,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CD_DCE,                       /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg
    | TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow,
                                        /* todo_flags_finish */
  0                                     /* letter */
};