/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output: "dead statements" have no impact
   on the program's output, while "necessary statements" may.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"

#include "tree.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that last_stmt of a basic block has already been
   marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)        \
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,     \
                            (EDGE_NUMBER), (BI))


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
        return EXIT_BLOCK_PTR;
      return bb;
    }
}


/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
         dependent because instructions that throw are always necessary
         anyway.  */
      if (e->flags & EDGE_ABNORMAL)
        continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}


/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  tree lhs = NULL_TREE;
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (flag_non_call_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      /* Most, but not all function calls are required.  Function calls
         that produce no result and have no side effects (i.e. const and
         pure functions) are unnecessary.  */
      if (gimple_has_side_effects (stmt))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
      if (!gimple_call_lhs (stmt))
        return;
      lhs = gimple_call_lhs (stmt);
      /* Fall through */

    case GIMPLE_ASSIGN:
      if (!lhs)
        lhs = gimple_assign_lhs (stmt);
      break;

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.  */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
  bitmap_iterator bi;
  unsigned edge_number;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      gimple stmt;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
        continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);
      SET_BIT (bb_contains_live_stmts, cd_bb->index);

      stmt = last_stmt (cd_bb);
      if (stmt && is_ctrl_stmt (stmt))
        mark_stmt_necessary (stmt, true);
    }
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, el != NULL);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  if ((TREE_READONLY (current_function_decl)
       || DECL_PURE_P (current_function_decl))
      && !DECL_LOOPING_CONST_OR_PURE_P (current_function_decl))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB (bb)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, el);
                }
          }

      FOR_EACH_LOOP (li, loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, el);
          }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

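/* Bitmap of VDEFs already visited by mark_all_reaching_defs_necessary,
   and counters implementing the alias-oracle walk budget; once the budget
   is exceeded, CHAIN_OVFL flips propagation into a cheaper mode that
   simply marks all reaching definitions necessary.  */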
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME)
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

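/* Mark the reaching definitions of the memory reference REF, which is based
   on a non-aliased decl and is used by STMT, as necessary, stopping the
   walk at dominating kills.  Also update the chain statistics that feed
   the walk budget above.  */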
static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  mark_operand_necessary (vdef);

  return false;
}

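/* Mark all may-reaching definitions of the memory used by STMT as necessary,
   except stores to non-aliased decls, which the worker above skips.  The
   global VISITED bitmap keeps repeated walks from turning quadratic.  */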
static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with identical
   arguments; such PHIs can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VEC_length (gimple, worklist) > 0)
    {
      /* Take STMT from worklist.  */
      stmt = VEC_pop (gimple, worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statements of the basic blocks that the block
             containing STMT is control dependent on, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR
              && ! TEST_BIT (visited_control_parents, bb->index))
            {
              SET_BIT (visited_control_parents, bb->index);
              mark_control_dependent_edges_necessary (bb, el);
            }
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && is_gimple_reg (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
                  if (arg_bb != ENTRY_BLOCK_PTR
                      && ! TEST_BIT (visited_control_parents, arg_bb->index))
                    {
                      SET_BIT (visited_control_parents, arg_bb->index);
                      mark_control_dependent_edges_necessary (arg_bb, el);
                    }
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but not non-aliased ones, which we
             handle in 1).  By keeping a global visited bitmap for the
             references we walk in 2) we avoid quadratic behavior.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              bool rhs_aliased = false;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs))
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    rhs_aliased = true;
                }
              if (rhs_aliased)
                mark_all_reaching_defs_necessary (stmt);
            }
          else if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              tree rhs = gimple_return_retval (stmt);
              /* A return statement may perform a load.  */
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs))
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_ASM)
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior up to
             a constant maximal chain and after that falls back to
             super-linear complexity.  */
          if (longest_chain > 256
              && total_chain > 256 * longest_chain)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Replace all uses of result of PHI by underlying variable and mark it
   for renaming.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;
  tree result_ssa, result_var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  result_ssa = gimple_phi_result (phi);
  result_var = SSA_NAME_VAR (result_ssa);
  FOR_EACH_IMM_USE_STMT (stmt, iter, result_ssa)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, result_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
    mark_sym_for_renaming (result_var);
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Find first live post dominator of BB.  */

static basic_block
get_live_post_dom (basic_block bb)
{
  basic_block post_dom_bb;

  /* The post dominance info has to be up-to-date.  */
  gcc_assert (dom_info_state (CDI_POST_DOMINATORS) == DOM_OK);

  /* Get the immediate post dominator of bb.  */
  post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
  /* And look for first live one.  */
  while (post_dom_bb != EXIT_BLOCK_PTR
         && !TEST_BIT (bb_contains_live_stmts, post_dom_bb->index))
    post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, post_dom_bb);

  return post_dom_bb;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (phi_nodes (post_dom_bb))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can look up the end of the
         control dependent path leading to the PHI itself.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e && dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->src))
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gimple phi = gsi_stmt (gsi);
          tree op;
          source_location locus;

          /* Dead PHIs do not imply a control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY)
              && is_gimple_reg (gimple_phi_result (phi)))
            {
              gsi_next (&gsi);
              continue;
            }
          if (gimple_phi_arg_def (phi, e->dest_idx))
            {
              gsi_next (&gsi);
              continue;
            }

          /* We didn't find an edge to update.  This can happen for PHIs on
             virtuals since there is no control dependence relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (!is_gimple_reg (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }
          if (!e2)
            {
              op = gimple_phi_arg_def (phi, e->dest_idx == 0 ? 1 : 0);
              locus = gimple_phi_arg_location (phi, e->dest_idx == 0 ? 1 : 0);
            }
          else
            {
              op = gimple_phi_arg_def (phi, e2->dest_idx);
              locus = gimple_phi_arg_location (phi, e2->dest_idx);
            }
          add_phi_arg (phi, op, e, locus);
          gcc_assert (e2 || degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_live_post_dom (bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              call = gimple_call_fndecl (stmt);
              if (call)
                {
                  tree name;

                  /* When LHS of var = call (); is dead, simplify it into
                     call (); saving one operand.  */
                  name = gimple_call_lhs (stmt);
                  if (name && TREE_CODE (name) == SSA_NAME
                      && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
                    {
                      something_changed = true;
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fprintf (dump_file, "Deleting LHS of call: ");
                          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                          fprintf (dump_file, "\n");
                        }

                      gimple_call_set_lhs (stmt, NULL_TREE);
                      maybe_clean_or_replace_eh_stmt (stmt, stmt);
                      update_stmt (stmt);
                      release_ssa_name (name);
                    }
                  notice_special_calls (stmt);
                }
            }
        }
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!TEST_BIT (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
                if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (VEC_length (basic_block, h))
                        {
                          bb = VEC_pop (basic_block, h);
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      VEC_free (basic_block, heap, h);
                    }
                }
            }
        }
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
        control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      sbitmap_zero (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  worklist = VEC_alloc (gimple, heap, 64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
        BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  VEC_free (gimple, heap, worklist);
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = false;

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that the loop will
     be proved to be finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  chain_ovfl = false;
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
            | TODO_remove_unused_locals);
  else
    return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

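/* Variant run inside the loop optimizer pipeline: if DCE removed anything,
   cached SCEV data and iteration estimates may refer to deleted statements,
   so drop them.  */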
static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

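/* Control-dependence based DCE: aggressive mode is only enabled at -O2
   and above, since it costs more time.  */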
static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

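/* All three DCE passes are gated by -ftree-dce.  */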
static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",                                /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",                            /* name */
  gate_dce,                             /* gate */
  tree_ssa_dce_loop,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_DCE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",                              /* name */
  gate_dce,                             /* gate */
  tree_ssa_cd_dce,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CD_DCE,                       /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_flow                  /* todo_flags_finish */
 }
};