1 /* Dead and redundant store elimination
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "gimple-iterator.h"
34 #include "tree-cfgcleanup.h"
36 #include "tree-ssa-loop.h"
37 #include "tree-ssa-dse.h"
39 #include "gimple-fold.h"
44 #include "ipa-modref-tree.h"
45 #include "ipa-modref.h"
47 /* This file implements dead store elimination.
49 A dead store is a store into a memory location which will later be
50 overwritten by another store without any intervening loads. In this
51 case the earlier store can be deleted or trimmed if the store
54 A redundant store is a store into a memory location which stores
55 the exact same value as a prior store to the same memory location.
56 While this can often be handled by dead store elimination, removing
57 the redundant store is often better than removing or trimming the
60 In our SSA + virtual operand world we use immediate uses of virtual
61 operands to detect these cases. If a store's virtual definition
62 is used precisely once by a later store to the same location which
63 post dominates the first store, then the first store is dead. If
64 the data stored is the same, then the second store is redundant.
66 The single use of the store's virtual definition ensures that
67 there are no intervening aliased loads and the requirement that
68 the second load post dominate the first ensures that if the earlier
69 store executes, then the later stores will execute before the function
72 It may help to think of this as first moving the earlier store to
73 the point immediately before the later store. Again, the single
74 use of the virtual definition and the post-dominance relationship
75 ensure that such movement would be safe. Clearly if there are
76 back to back stores, then the second makes the first dead.  If
77 the second store stores the same value, then the second store is
80 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
81 may also help in understanding this code since it discusses the
82 relationship between dead store and redundant load elimination. In
83 fact, they are the same transformation applied to different views of
86 static void delete_dead_or_redundant_call (gimple_stmt_iterator
*, const char *);
88 /* Bitmap of blocks that have had EH statements cleaned. We should
89 remove their dead edges eventually. */
90 static bitmap need_eh_cleanup
;
92 /* STMT is a statement that may write into memory. Analyze it and
93 initialize WRITE to describe how STMT affects memory.
95 Return TRUE if the statement was analyzed, FALSE otherwise.
97 It is always safe to return FALSE. But typically better optimziation
98 can be achieved by analyzing more statements. */
101 initialize_ao_ref_for_dse (gimple
*stmt
, ao_ref
*write
)
103 /* It's advantageous to handle certain mem* functions. */
104 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
106 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
)))
108 case BUILT_IN_MEMCPY
:
109 case BUILT_IN_MEMMOVE
:
110 case BUILT_IN_MEMSET
:
111 case BUILT_IN_MEMCPY_CHK
:
112 case BUILT_IN_MEMMOVE_CHK
:
113 case BUILT_IN_MEMSET_CHK
:
114 case BUILT_IN_STRNCPY
:
115 case BUILT_IN_STRNCPY_CHK
:
117 tree size
= gimple_call_arg (stmt
, 2);
118 tree ptr
= gimple_call_arg (stmt
, 0);
119 ao_ref_init_from_ptr_and_size (write
, ptr
, size
);
123 /* A calloc call can never be dead, but it can make
124 subsequent stores redundant if they store 0 into
125 the same memory locations. */
126 case BUILT_IN_CALLOC
:
128 tree nelem
= gimple_call_arg (stmt
, 0);
129 tree selem
= gimple_call_arg (stmt
, 1);
131 if (TREE_CODE (nelem
) == INTEGER_CST
132 && TREE_CODE (selem
) == INTEGER_CST
133 && (lhs
= gimple_call_lhs (stmt
)) != NULL_TREE
)
135 tree size
= fold_build2 (MULT_EXPR
, TREE_TYPE (nelem
),
137 ao_ref_init_from_ptr_and_size (write
, lhs
, size
);
146 else if (tree lhs
= gimple_get_lhs (stmt
))
148 if (TREE_CODE (lhs
) != SSA_NAME
)
150 ao_ref_init (write
, lhs
);
157 /* Given REF from the alias oracle, return TRUE if it is a valid
158 memory reference for dead store elimination, false otherwise.
160 In particular, the reference must have a known base, known maximum
161 size, start at a byte offset and have a size that is one or more
165 valid_ao_ref_for_dse (ao_ref
*ref
)
167 return (ao_ref_base (ref
)
168 && known_size_p (ref
->max_size
)
169 && maybe_ne (ref
->size
, 0)
170 && known_eq (ref
->max_size
, ref
->size
)
171 && known_ge (ref
->offset
, 0)
172 && multiple_p (ref
->offset
, BITS_PER_UNIT
)
173 && multiple_p (ref
->size
, BITS_PER_UNIT
));
176 /* Try to normalize COPY (an ao_ref) relative to REF. Essentially when we are
177 done COPY will only refer bytes found within REF. Return true if COPY
178 is known to intersect at least one byte of REF. */
181 normalize_ref (ao_ref
*copy
, ao_ref
*ref
)
183 if (!ordered_p (copy
->offset
, ref
->offset
))
186 /* If COPY starts before REF, then reset the beginning of
187 COPY to match REF and decrease the size of COPY by the
188 number of bytes removed from COPY. */
189 if (maybe_lt (copy
->offset
, ref
->offset
))
191 poly_int64 diff
= ref
->offset
- copy
->offset
;
192 if (maybe_le (copy
->size
, diff
))
195 copy
->offset
= ref
->offset
;
198 poly_int64 diff
= copy
->offset
- ref
->offset
;
199 if (maybe_le (ref
->size
, diff
))
202 /* If COPY extends beyond REF, chop off its size appropriately. */
203 poly_int64 limit
= ref
->size
- diff
;
204 if (!ordered_p (limit
, copy
->size
))
207 if (maybe_gt (copy
->size
, limit
))
212 /* Update LIVE_BYTES tracking REF for write to WRITE:
213 Verify we have the same base memory address, the write
214 has a known size and overlaps with REF. */
216 clear_live_bytes_for_ref (sbitmap live_bytes
, ao_ref
*ref
, ao_ref write
)
218 HOST_WIDE_INT start
, size
;
220 if (valid_ao_ref_for_dse (&write
)
221 && operand_equal_p (write
.base
, ref
->base
, OEP_ADDRESS_OF
)
222 && known_eq (write
.size
, write
.max_size
)
223 /* normalize_ref modifies write and for that reason write is not
224 passed by reference. */
225 && normalize_ref (&write
, ref
)
226 && (write
.offset
- ref
->offset
).is_constant (&start
)
227 && write
.size
.is_constant (&size
))
228 bitmap_clear_range (live_bytes
, start
/ BITS_PER_UNIT
,
229 size
/ BITS_PER_UNIT
);
232 /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
233 address written by STMT must match the one found in REF, which must
234 have its base address previously initialized.
236 This routine must be conservative. If we don't know the offset or
237 actual size written, assume nothing was written. */
240 clear_bytes_written_by (sbitmap live_bytes
, gimple
*stmt
, ao_ref
*ref
)
244 if (gcall
*call
= dyn_cast
<gcall
*> (stmt
))
247 modref_summary
*summary
= get_modref_function_summary (call
, &interposed
);
249 if (summary
&& !interposed
)
250 for (auto kill
: summary
->kills
)
251 if (kill
.get_ao_ref (as_a
<gcall
*> (stmt
), &write
))
252 clear_live_bytes_for_ref (live_bytes
, ref
, write
);
254 if (!initialize_ao_ref_for_dse (stmt
, &write
))
257 clear_live_bytes_for_ref (live_bytes
, ref
, write
);
260 /* REF is a memory write. Extract relevant information from it and
261 initialize the LIVE_BYTES bitmap. If successful, return TRUE.
262 Otherwise return FALSE. */
265 setup_live_bytes_from_ref (ao_ref
*ref
, sbitmap live_bytes
)
267 HOST_WIDE_INT const_size
;
268 if (valid_ao_ref_for_dse (ref
)
269 && ref
->size
.is_constant (&const_size
)
270 && (const_size
/ BITS_PER_UNIT
271 <= param_dse_max_object_size
))
273 bitmap_clear (live_bytes
);
274 bitmap_set_range (live_bytes
, 0, const_size
/ BITS_PER_UNIT
);
280 /* Compute the number of elements that we can trim from the head and
281 tail of ORIG resulting in a bitmap that is a superset of LIVE.
283 Store the number of elements trimmed from the head and tail in
284 TRIM_HEAD and TRIM_TAIL.
286 STMT is the statement being trimmed and is used for debugging dump
290 compute_trims (ao_ref
*ref
, sbitmap live
, int *trim_head
, int *trim_tail
,
293 /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
294 extends through ref->size. So we know that in the original bitmap
295 bits 0..ref->size were true. We don't actually need the bitmap, just
296 the REF to compute the trims. */
298 /* Now identify how much, if any of the tail we can chop off. */
299 HOST_WIDE_INT const_size
;
300 int last_live
= bitmap_last_set_bit (live
);
301 if (ref
->size
.is_constant (&const_size
))
303 int last_orig
= (const_size
/ BITS_PER_UNIT
) - 1;
304 /* We can leave inconvenient amounts on the tail as
305 residual handling in mem* and str* functions is usually
306 reasonably efficient. */
307 *trim_tail
= last_orig
- last_live
;
309 /* But don't trim away out of bounds accesses, as this defeats
312 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
313 where TYPE_SIZE_UNIT is not a constant. */
315 && TYPE_SIZE_UNIT (TREE_TYPE (ref
->base
))
316 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref
->base
))) == INTEGER_CST
317 && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref
->base
)),
324 /* Identify how much, if any of the head we can chop off. */
326 int first_live
= bitmap_first_set_bit (live
);
327 *trim_head
= first_live
- first_orig
;
329 /* If more than a word remains, then make sure to keep the
330 starting point at least word aligned. */
331 if (last_live
- first_live
> UNITS_PER_WORD
)
332 *trim_head
&= ~(UNITS_PER_WORD
- 1);
334 if ((*trim_head
|| *trim_tail
)
335 && dump_file
&& (dump_flags
& TDF_DETAILS
))
337 fprintf (dump_file
, " Trimming statement (head = %d, tail = %d): ",
338 *trim_head
, *trim_tail
);
339 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
340 fprintf (dump_file
, "\n");
344 /* STMT initializes an object from COMPLEX_CST where one or more of the
345 bytes written may be dead stores. REF is a representation of the
346 memory written. LIVE is the bitmap of stores that are actually live.
348 Attempt to rewrite STMT so that only the real or imaginary part of
349 the object is actually stored. */
352 maybe_trim_complex_store (ao_ref
*ref
, sbitmap live
, gimple
*stmt
)
354 int trim_head
, trim_tail
;
355 compute_trims (ref
, live
, &trim_head
, &trim_tail
, stmt
);
357 /* The amount of data trimmed from the head or tail must be at
358 least half the size of the object to ensure we're trimming
359 the entire real or imaginary half. By writing things this
360 way we avoid more O(n) bitmap operations. */
361 if (known_ge (trim_tail
* 2 * BITS_PER_UNIT
, ref
->size
))
363 /* TREE_REALPART is live */
364 tree x
= TREE_REALPART (gimple_assign_rhs1 (stmt
));
365 tree y
= gimple_assign_lhs (stmt
);
366 y
= build1 (REALPART_EXPR
, TREE_TYPE (x
), y
);
367 gimple_assign_set_lhs (stmt
, y
);
368 gimple_assign_set_rhs1 (stmt
, x
);
370 else if (known_ge (trim_head
* 2 * BITS_PER_UNIT
, ref
->size
))
372 /* TREE_IMAGPART is live */
373 tree x
= TREE_IMAGPART (gimple_assign_rhs1 (stmt
));
374 tree y
= gimple_assign_lhs (stmt
);
375 y
= build1 (IMAGPART_EXPR
, TREE_TYPE (x
), y
);
376 gimple_assign_set_lhs (stmt
, y
);
377 gimple_assign_set_rhs1 (stmt
, x
);
380 /* Other cases indicate parts of both the real and imag subobjects
381 are live. We do not try to optimize those cases. */
384 /* STMT initializes an object using a CONSTRUCTOR where one or more of the
385 bytes written are dead stores. ORIG is the bitmap of bytes stored by
386 STMT. LIVE is the bitmap of stores that are actually live.
388 Attempt to rewrite STMT so that only the real or imaginary part of
389 the object is actually stored.
391 The most common case for getting here is a CONSTRUCTOR with no elements
392 being used to zero initialize an object. We do not try to handle other
393 cases as those would force us to fully cover the object with the
394 CONSTRUCTOR node except for the components that are dead. */
397 maybe_trim_constructor_store (ao_ref
*ref
, sbitmap live
, gimple
*stmt
)
399 tree ctor
= gimple_assign_rhs1 (stmt
);
401 /* This is the only case we currently handle. It actually seems to
402 catch most cases of actual interest. */
403 gcc_assert (CONSTRUCTOR_NELTS (ctor
) == 0);
407 compute_trims (ref
, live
, &head_trim
, &tail_trim
, stmt
);
409 /* Now we want to replace the constructor initializer
410 with memset (object + head_trim, 0, size - head_trim - tail_trim). */
411 if (head_trim
|| tail_trim
)
413 /* We want &lhs for the MEM_REF expression. */
414 tree lhs_addr
= build_fold_addr_expr (gimple_assign_lhs (stmt
));
416 if (! is_gimple_min_invariant (lhs_addr
))
419 /* The number of bytes for the new constructor. */
420 poly_int64 ref_bytes
= exact_div (ref
->size
, BITS_PER_UNIT
);
421 poly_int64 count
= ref_bytes
- head_trim
- tail_trim
;
423 /* And the new type for the CONSTRUCTOR. Essentially it's just
424 a char array large enough to cover the non-trimmed parts of
425 the original CONSTRUCTOR. Note we want explicit bounds here
426 so that we know how many bytes to clear when expanding the
428 tree type
= build_array_type_nelts (char_type_node
, count
);
430 /* Build a suitable alias type rather than using alias set zero
431 to avoid pessimizing. */
432 tree alias_type
= reference_alias_ptr_type (gimple_assign_lhs (stmt
));
434 /* Build a MEM_REF representing the whole accessed area, starting
435 at the first byte not trimmed. */
436 tree exp
= fold_build2 (MEM_REF
, type
, lhs_addr
,
437 build_int_cst (alias_type
, head_trim
));
439 /* Now update STMT with a new RHS and LHS. */
440 gimple_assign_set_lhs (stmt
, exp
);
441 gimple_assign_set_rhs1 (stmt
, build_constructor (type
, NULL
));
445 /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
446 copied/set by DECREMENT. */
448 decrement_count (gimple
*stmt
, int decrement
)
450 tree
*countp
= gimple_call_arg_ptr (stmt
, 2);
451 gcc_assert (TREE_CODE (*countp
) == INTEGER_CST
);
452 *countp
= wide_int_to_tree (TREE_TYPE (*countp
), (TREE_INT_CST_LOW (*countp
)
457 increment_start_addr (gimple
*stmt
, tree
*where
, int increment
)
459 if (tree lhs
= gimple_call_lhs (stmt
))
460 if (where
== gimple_call_arg_ptr (stmt
, 0))
462 gassign
*newop
= gimple_build_assign (lhs
, unshare_expr (*where
));
463 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
464 gsi_insert_after (&gsi
, newop
, GSI_SAME_STMT
);
465 gimple_call_set_lhs (stmt
, NULL_TREE
);
469 if (TREE_CODE (*where
) == SSA_NAME
)
471 tree tem
= make_ssa_name (TREE_TYPE (*where
));
473 = gimple_build_assign (tem
, POINTER_PLUS_EXPR
, *where
,
474 build_int_cst (sizetype
, increment
));
475 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
476 gsi_insert_before (&gsi
, newop
, GSI_SAME_STMT
);
482 *where
= build_fold_addr_expr (fold_build2 (MEM_REF
, char_type_node
,
484 build_int_cst (ptr_type_node
,
488 /* STMT is builtin call that writes bytes in bitmap ORIG, some bytes are dead
489 (ORIG & ~NEW) and need not be stored. Try to rewrite STMT to reduce
490 the amount of data it actually writes.
492 Right now we only support trimming from the head or the tail of the
493 memory region. In theory we could split the mem* call, but it's
494 likely of marginal value. */
497 maybe_trim_memstar_call (ao_ref
*ref
, sbitmap live
, gimple
*stmt
)
499 int head_trim
, tail_trim
;
500 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
)))
502 case BUILT_IN_STRNCPY
:
503 case BUILT_IN_STRNCPY_CHK
:
504 compute_trims (ref
, live
, &head_trim
, &tail_trim
, stmt
);
507 /* Head trimming of strncpy is only possible if we can
508 prove all bytes we would trim are non-zero (or we could
509 turn the strncpy into memset if there must be zero
510 among the head trimmed bytes). If we don't know anything
511 about those bytes, the presence or absence of '\0' bytes
512 in there will affect whether it acts for the non-trimmed
513 bytes as memset or memcpy/strncpy. */
514 c_strlen_data lendata
= { };
515 int orig_head_trim
= head_trim
;
516 tree srcstr
= gimple_call_arg (stmt
, 1);
517 if (!get_range_strlen (srcstr
, &lendata
, /*eltsize=*/1)
518 || !tree_fits_uhwi_p (lendata
.minlen
))
520 else if (tree_to_uhwi (lendata
.minlen
) < (unsigned) head_trim
)
522 head_trim
= tree_to_uhwi (lendata
.minlen
);
523 if ((orig_head_trim
& (UNITS_PER_WORD
- 1)) == 0)
524 head_trim
&= ~(UNITS_PER_WORD
- 1);
526 if (orig_head_trim
!= head_trim
528 && (dump_flags
& TDF_DETAILS
))
530 " Adjusting strncpy trimming to (head = %d,"
531 " tail = %d)\n", head_trim
, tail_trim
);
535 case BUILT_IN_MEMCPY
:
536 case BUILT_IN_MEMMOVE
:
537 case BUILT_IN_MEMCPY_CHK
:
538 case BUILT_IN_MEMMOVE_CHK
:
539 compute_trims (ref
, live
, &head_trim
, &tail_trim
, stmt
);
542 /* Tail trimming is easy, we can just reduce the count. */
544 decrement_count (stmt
, tail_trim
);
546 /* Head trimming requires adjusting all the arguments. */
549 /* For __*_chk need to adjust also the last argument. */
550 if (gimple_call_num_args (stmt
) == 4)
552 tree size
= gimple_call_arg (stmt
, 3);
553 if (!tree_fits_uhwi_p (size
))
555 if (!integer_all_onesp (size
))
557 unsigned HOST_WIDE_INT sz
= tree_to_uhwi (size
);
558 if (sz
< (unsigned) head_trim
)
560 tree arg
= wide_int_to_tree (TREE_TYPE (size
),
562 gimple_call_set_arg (stmt
, 3, arg
);
565 tree
*dst
= gimple_call_arg_ptr (stmt
, 0);
566 increment_start_addr (stmt
, dst
, head_trim
);
567 tree
*src
= gimple_call_arg_ptr (stmt
, 1);
568 increment_start_addr (stmt
, src
, head_trim
);
569 decrement_count (stmt
, head_trim
);
573 case BUILT_IN_MEMSET
:
574 case BUILT_IN_MEMSET_CHK
:
575 compute_trims (ref
, live
, &head_trim
, &tail_trim
, stmt
);
577 /* Tail trimming is easy, we can just reduce the count. */
579 decrement_count (stmt
, tail_trim
);
581 /* Head trimming requires adjusting all the arguments. */
584 /* For __*_chk need to adjust also the last argument. */
585 if (gimple_call_num_args (stmt
) == 4)
587 tree size
= gimple_call_arg (stmt
, 3);
588 if (!tree_fits_uhwi_p (size
))
590 if (!integer_all_onesp (size
))
592 unsigned HOST_WIDE_INT sz
= tree_to_uhwi (size
);
593 if (sz
< (unsigned) head_trim
)
595 tree arg
= wide_int_to_tree (TREE_TYPE (size
),
597 gimple_call_set_arg (stmt
, 3, arg
);
600 tree
*dst
= gimple_call_arg_ptr (stmt
, 0);
601 increment_start_addr (stmt
, dst
, head_trim
);
602 decrement_count (stmt
, head_trim
);
611 /* STMT is a memory write where one or more bytes written are dead
612 stores. ORIG is the bitmap of bytes stored by STMT. LIVE is the
613 bitmap of stores that are actually live.
615 Attempt to rewrite STMT so that it writes fewer memory locations. Right
616 now we only support trimming at the start or end of the memory region.
617 It's not clear how much there is to be gained by trimming from the middle
621 maybe_trim_partially_dead_store (ao_ref
*ref
, sbitmap live
, gimple
*stmt
)
623 if (is_gimple_assign (stmt
)
624 && TREE_CODE (gimple_assign_lhs (stmt
)) != TARGET_MEM_REF
)
626 switch (gimple_assign_rhs_code (stmt
))
629 maybe_trim_constructor_store (ref
, live
, stmt
);
632 maybe_trim_complex_store (ref
, live
, stmt
);
640 /* Return TRUE if USE_REF reads bytes from LIVE where live is
641 derived from REF, a write reference.
643 While this routine may modify USE_REF, it's passed by value, not
644 location. So callers do not see those modifications. */
647 live_bytes_read (ao_ref use_ref
, ao_ref
*ref
, sbitmap live
)
649 /* We have already verified that USE_REF and REF hit the same object.
650 Now verify that there's actually an overlap between USE_REF and REF. */
651 HOST_WIDE_INT start
, size
;
652 if (normalize_ref (&use_ref
, ref
)
653 && (use_ref
.offset
- ref
->offset
).is_constant (&start
)
654 && use_ref
.size
.is_constant (&size
))
656 /* If USE_REF covers all of REF, then it will hit one or more
657 live bytes. This avoids useless iteration over the bitmap
659 if (start
== 0 && known_eq (size
, ref
->size
))
662 /* Now check if any of the remaining bits in use_ref are set in LIVE. */
663 return bitmap_bit_in_range_p (live
, start
/ BITS_PER_UNIT
,
664 (start
+ size
- 1) / BITS_PER_UNIT
);
669 /* Callback for dse_classify_store calling for_each_index. Verify that
670 indices are invariant in the loop with backedge PHI in basic-block DATA. */
673 check_name (tree
, tree
*idx
, void *data
)
675 basic_block phi_bb
= (basic_block
) data
;
676 if (TREE_CODE (*idx
) == SSA_NAME
677 && !SSA_NAME_IS_DEFAULT_DEF (*idx
)
678 && dominated_by_p (CDI_DOMINATORS
, gimple_bb (SSA_NAME_DEF_STMT (*idx
)),
684 /* STMT stores the value 0 into one or more memory locations
685 (via memset, empty constructor, calloc call, etc).
687 See if there is a subsequent store of the value 0 to one
688 or more of the same memory location(s). If so, the subsequent
689 store is redundant and can be removed.
691 The subsequent stores could be via memset, empty constructors,
692 simple MEM stores, etc. */
695 dse_optimize_redundant_stores (gimple
*stmt
)
699 /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
700 alias_set_type earlier_set
= 0;
701 alias_set_type earlier_base_set
= 0;
702 if (is_gimple_assign (stmt
))
705 ao_ref_init (&lhs_ref
, gimple_assign_lhs (stmt
));
706 earlier_set
= ao_ref_alias_set (&lhs_ref
);
707 earlier_base_set
= ao_ref_base_alias_set (&lhs_ref
);
710 /* We could do something fairly complex and look through PHIs
711 like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
714 Look at all the immediate uses of the VDEF (which are obviously
715 dominated by STMT). See if one or more stores 0 into the same
716 memory locations a STMT, if so remove the immediate use statements. */
717 tree defvar
= gimple_vdef (stmt
);
720 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, defvar
)
722 /* Limit stmt walking. */
723 if (++cnt
> param_dse_max_alias_queries_per_store
)
726 /* If USE_STMT stores 0 into one or more of the same locations
727 as STMT and STMT would kill USE_STMT, then we can just remove
730 if ((is_gimple_assign (use_stmt
)
731 && gimple_vdef (use_stmt
)
732 && (gimple_assign_single_p (use_stmt
)
733 && initializer_zerop (gimple_assign_rhs1 (use_stmt
))))
734 || (gimple_call_builtin_p (use_stmt
, BUILT_IN_NORMAL
)
735 && (fndecl
= gimple_call_fndecl (use_stmt
)) != NULL
736 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMSET
737 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMSET_CHK
)
738 && integer_zerop (gimple_call_arg (use_stmt
, 1))))
742 if (!initialize_ao_ref_for_dse (use_stmt
, &write
))
745 if (valid_ao_ref_for_dse (&write
)
746 && stmt_kills_ref_p (stmt
, &write
))
748 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
749 if (is_gimple_assign (use_stmt
))
752 ao_ref_init (&lhs_ref
, gimple_assign_lhs (use_stmt
));
753 if ((earlier_set
== ao_ref_alias_set (&lhs_ref
)
754 || alias_set_subset_of (ao_ref_alias_set (&lhs_ref
),
756 && (earlier_base_set
== ao_ref_base_alias_set (&lhs_ref
)
757 || alias_set_subset_of
758 (ao_ref_base_alias_set (&lhs_ref
),
760 delete_dead_or_redundant_assignment (&gsi
, "redundant",
763 else if (is_gimple_call (use_stmt
))
765 if ((earlier_set
== 0
766 || alias_set_subset_of (0, earlier_set
))
767 && (earlier_base_set
== 0
768 || alias_set_subset_of (0, earlier_base_set
)))
769 delete_dead_or_redundant_call (&gsi
, "redundant");
778 /* A helper of dse_optimize_stmt.
779 Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
780 according to downstream uses and defs. Sets *BY_CLOBBER_P to true
781 if only clobber statements influenced the classification result.
782 Returns the classification. */
785 dse_classify_store (ao_ref
*ref
, gimple
*stmt
,
786 bool byte_tracking_enabled
, sbitmap live_bytes
,
787 bool *by_clobber_p
, tree stop_at_vuse
)
794 *by_clobber_p
= true;
796 /* Find the first dominated statement that clobbers (part of) the
797 memory stmt stores to with no intermediate statement that may use
798 part of the memory stmt stores. That is, find a store that may
799 prove stmt to be a dead store. */
808 if (gimple_code (temp
) == GIMPLE_PHI
)
810 /* If we visit this PHI by following a backedge then we have to
811 make sure ref->ref only refers to SSA names that are invariant
812 with respect to the loop represented by this PHI node. */
813 if (dominated_by_p (CDI_DOMINATORS
, gimple_bb (stmt
),
815 && !for_each_index (ref
->ref
? &ref
->ref
: &ref
->base
,
816 check_name
, gimple_bb (temp
)))
817 return DSE_STORE_LIVE
;
818 defvar
= PHI_RESULT (temp
);
819 bitmap_set_bit (visited
, SSA_NAME_VERSION (defvar
));
822 defvar
= gimple_vdef (temp
);
824 /* If we're instructed to stop walking at region boundary, do so. */
825 if (defvar
== stop_at_vuse
)
826 return DSE_STORE_LIVE
;
828 auto_vec
<gimple
*, 10> defs
;
829 gimple
*first_phi_def
= NULL
;
830 gimple
*last_phi_def
= NULL
;
831 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, defvar
)
833 /* Limit stmt walking. */
834 if (++cnt
> param_dse_max_alias_queries_per_store
)
840 /* In simple cases we can look through PHI nodes, but we
841 have to be careful with loops and with memory references
842 containing operands that are also operands of PHI nodes.
843 See gcc.c-torture/execute/20051110-*.c. */
844 if (gimple_code (use_stmt
) == GIMPLE_PHI
)
846 /* If we already visited this PHI ignore it for further
848 if (!bitmap_bit_p (visited
,
849 SSA_NAME_VERSION (PHI_RESULT (use_stmt
))))
851 defs
.safe_push (use_stmt
);
853 first_phi_def
= use_stmt
;
854 last_phi_def
= use_stmt
;
857 /* If the statement is a use the store is not dead. */
858 else if (ref_maybe_used_by_stmt_p (use_stmt
, ref
))
860 /* Handle common cases where we can easily build an ao_ref
861 structure for USE_STMT and in doing so we find that the
862 references hit non-live bytes and thus can be ignored. */
863 if (byte_tracking_enabled
864 && is_gimple_assign (use_stmt
))
867 ao_ref_init (&use_ref
, gimple_assign_rhs1 (use_stmt
));
868 if (valid_ao_ref_for_dse (&use_ref
)
869 && use_ref
.base
== ref
->base
870 && known_eq (use_ref
.size
, use_ref
.max_size
)
871 && !live_bytes_read (use_ref
, ref
, live_bytes
))
873 /* If this is a store, remember it as we possibly
874 need to walk the defs uses. */
875 if (gimple_vdef (use_stmt
))
876 defs
.safe_push (use_stmt
);
884 /* We have visited ourselves already so ignore STMT for the
885 purpose of chaining. */
886 else if (use_stmt
== stmt
)
888 /* If this is a store, remember it as we possibly need to walk the
890 else if (gimple_vdef (use_stmt
))
891 defs
.safe_push (use_stmt
);
896 /* STMT might be partially dead and we may be able to reduce
897 how many memory locations it stores into. */
898 if (byte_tracking_enabled
&& !gimple_clobber_p (stmt
))
899 return DSE_STORE_MAYBE_PARTIAL_DEAD
;
900 return DSE_STORE_LIVE
;
903 /* If we didn't find any definition this means the store is dead
904 if it isn't a store to global reachable memory. In this case
905 just pretend the stmt makes itself dead. Otherwise fail. */
906 if (defs
.is_empty ())
908 if (ref_may_alias_global_p (ref
))
909 return DSE_STORE_LIVE
;
912 *by_clobber_p
= false;
913 return DSE_STORE_DEAD
;
916 /* Process defs and remove those we need not process further. */
917 for (unsigned i
= 0; i
< defs
.length ();)
919 gimple
*def
= defs
[i
];
922 tree vdef
= (gimple_code (def
) == GIMPLE_PHI
923 ? gimple_phi_result (def
) : gimple_vdef (def
));
924 /* If the path to check starts with a kill we do not need to
926 ??? With byte tracking we need only kill the bytes currently
928 if (stmt_kills_ref_p (def
, ref
))
930 if (by_clobber_p
&& !gimple_clobber_p (def
))
931 *by_clobber_p
= false;
932 defs
.unordered_remove (i
);
934 /* If the path ends here we do not need to process it further.
935 This for example happens with calls to noreturn functions. */
936 else if (has_zero_uses (vdef
))
938 /* But if the store is to global memory it is definitely
940 if (ref_may_alias_global_p (ref
))
941 return DSE_STORE_LIVE
;
942 defs
.unordered_remove (i
);
944 /* In addition to kills we can remove defs whose only use
945 is another def in defs. That can only ever be PHIs of which
946 we track two for simplicity reasons, the first and last in
947 {first,last}_phi_def (we fail for multiple PHIs anyways).
948 We can also ignore defs that feed only into
949 already visited PHIs. */
950 else if (single_imm_use (vdef
, &use_p
, &use_stmt
)
951 && (use_stmt
== first_phi_def
952 || use_stmt
== last_phi_def
953 || (gimple_code (use_stmt
) == GIMPLE_PHI
954 && bitmap_bit_p (visited
,
956 (PHI_RESULT (use_stmt
))))))
957 defs
.unordered_remove (i
);
962 /* If all defs kill the ref we are done. */
963 if (defs
.is_empty ())
964 return DSE_STORE_DEAD
;
965 /* If more than one def survives fail. */
966 if (defs
.length () > 1)
968 /* STMT might be partially dead and we may be able to reduce
969 how many memory locations it stores into. */
970 if (byte_tracking_enabled
&& !gimple_clobber_p (stmt
))
971 return DSE_STORE_MAYBE_PARTIAL_DEAD
;
972 return DSE_STORE_LIVE
;
976 /* Track partial kills. */
977 if (byte_tracking_enabled
)
979 clear_bytes_written_by (live_bytes
, temp
, ref
);
980 if (bitmap_empty_p (live_bytes
))
982 if (by_clobber_p
&& !gimple_clobber_p (temp
))
983 *by_clobber_p
= false;
984 return DSE_STORE_DEAD
;
988 /* Continue walking until there are no more live bytes. */
993 /* Delete a dead call at GSI, which is mem* call of some kind. */
995 delete_dead_or_redundant_call (gimple_stmt_iterator
*gsi
, const char *type
)
997 gimple
*stmt
= gsi_stmt (*gsi
);
998 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1000 fprintf (dump_file
, " Deleted %s call: ", type
);
1001 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
1002 fprintf (dump_file
, "\n");
1005 basic_block bb
= gimple_bb (stmt
);
1006 tree lhs
= gimple_call_lhs (stmt
);
1009 tree ptr
= gimple_call_arg (stmt
, 0);
1010 gimple
*new_stmt
= gimple_build_assign (lhs
, ptr
);
1011 unlink_stmt_vdef (stmt
);
1012 if (gsi_replace (gsi
, new_stmt
, true))
1013 bitmap_set_bit (need_eh_cleanup
, bb
->index
);
1017 /* Then we need to fix the operand of the consuming stmt. */
1018 unlink_stmt_vdef (stmt
);
1020 /* Remove the dead store. */
1021 if (gsi_remove (gsi
, true))
1022 bitmap_set_bit (need_eh_cleanup
, bb
->index
);
1023 release_defs (stmt
);
1027 /* Delete a dead store at GSI, which is a gimple assignment. */
1030 delete_dead_or_redundant_assignment (gimple_stmt_iterator
*gsi
, const char *type
,
1031 bitmap need_eh_cleanup
)
1033 gimple
*stmt
= gsi_stmt (*gsi
);
1034 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1036 fprintf (dump_file
, " Deleted %s store: ", type
);
1037 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
1038 fprintf (dump_file
, "\n");
1041 /* Then we need to fix the operand of the consuming stmt. */
1042 unlink_stmt_vdef (stmt
);
1044 /* Remove the dead store. */
1045 basic_block bb
= gimple_bb (stmt
);
1046 if (gsi_remove (gsi
, true) && need_eh_cleanup
)
1047 bitmap_set_bit (need_eh_cleanup
, bb
->index
);
1049 /* And release any SSA_NAMEs set in this statement back to the
1050 SSA_NAME manager. */
1051 release_defs (stmt
);
1054 /* Try to prove, using modref summary, that all memory written to by a call is
1055 dead and remove it. Assume that if return value is written to memory
1056 it is already proved to be dead. */
1059 dse_optimize_call (gimple_stmt_iterator
*gsi
, sbitmap live_bytes
)
1061 gcall
*stmt
= dyn_cast
<gcall
*> (gsi_stmt (*gsi
));
1066 tree callee
= gimple_call_fndecl (stmt
);
1071 /* Pure/const functions are optimized by normal DCE
1072 or handled as store above. */
1073 int flags
= gimple_call_flags (stmt
);
1074 if ((flags
& (ECF_PURE
|ECF_CONST
|ECF_NOVOPS
))
1075 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
1078 cgraph_node
*node
= cgraph_node::get (callee
);
1082 if (stmt_could_throw_p (cfun
, stmt
)
1083 && !cfun
->can_delete_dead_exceptions
)
1086 /* If return value is used the call is not dead. */
1087 tree lhs
= gimple_call_lhs (stmt
);
1088 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
1090 imm_use_iterator ui
;
1092 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, lhs
)
1093 if (!is_gimple_debug (use_stmt
))
1097 /* Verify that there are no side-effects except for return value
1098 and memory writes tracked by modref. */
1099 modref_summary
*summary
= get_modref_function_summary (node
);
1100 if (!summary
|| !summary
->try_dse
)
1103 bool by_clobber_p
= false;
1105 /* Walk all memory writes and verify that they are dead. */
1106 for (auto base_node
: summary
->stores
->bases
)
1107 for (auto ref_node
: base_node
->refs
)
1108 for (auto access_node
: ref_node
->accesses
)
1110 tree arg
= access_node
.get_call_arg (stmt
);
1115 if (integer_zerop (arg
) && flag_delete_null_pointer_checks
)
1120 if (!access_node
.get_ao_ref (stmt
, &ref
))
1122 ref
.ref_alias_set
= ref_node
->ref
;
1123 ref
.base_alias_set
= base_node
->base
;
1125 bool byte_tracking_enabled
1126 = setup_live_bytes_from_ref (&ref
, live_bytes
);
1127 enum dse_store_status store_status
;
1129 store_status
= dse_classify_store (&ref
, stmt
,
1130 byte_tracking_enabled
,
1131 live_bytes
, &by_clobber_p
);
1132 if (store_status
!= DSE_STORE_DEAD
)
1135 delete_dead_or_redundant_assignment (gsi
, "dead", need_eh_cleanup
);
1139 /* Attempt to eliminate dead stores in the statement referenced by BSI.
1141 A dead store is a store into a memory location which will later be
1142 overwritten by another store without any intervening loads. In this
1143 case the earlier store can be deleted.
1145 In our SSA + virtual operand world we use immediate uses of virtual
1146 operands to detect dead stores. If a store's virtual definition
1147 is used precisely once by a later store to the same location which
1148 post dominates the first store, then the first store is dead. */
1151 dse_optimize_stmt (function
*fun
, gimple_stmt_iterator
*gsi
, sbitmap live_bytes
)
1153 gimple
*stmt
= gsi_stmt (*gsi
);
1155 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1156 if (gimple_has_volatile_ops (stmt
)
1157 && (!gimple_clobber_p (stmt
)
1158 || TREE_CODE (gimple_assign_lhs (stmt
)) != MEM_REF
))
1162 /* If this is not a store we can still remove dead call using
1164 if (!initialize_ao_ref_for_dse (stmt
, &ref
))
1166 dse_optimize_call (gsi
, live_bytes
);
1170 /* We know we have virtual definitions. We can handle assignments and
1171 some builtin calls. */
1172 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
1174 tree fndecl
= gimple_call_fndecl (stmt
);
1175 switch (DECL_FUNCTION_CODE (fndecl
))
1177 case BUILT_IN_MEMCPY
:
1178 case BUILT_IN_MEMMOVE
:
1179 case BUILT_IN_STRNCPY
:
1180 case BUILT_IN_MEMSET
:
1181 case BUILT_IN_MEMCPY_CHK
:
1182 case BUILT_IN_MEMMOVE_CHK
:
1183 case BUILT_IN_STRNCPY_CHK
:
1184 case BUILT_IN_MEMSET_CHK
:
1186 /* Occasionally calls with an explicit length of zero
1187 show up in the IL. It's pointless to do analysis
1188 on them, they're trivially dead. */
1189 tree size
= gimple_call_arg (stmt
, 2);
1190 if (integer_zerop (size
))
1192 delete_dead_or_redundant_call (gsi
, "dead");
1196 /* If this is a memset call that initializes an object
1197 to zero, it may be redundant with an earlier memset
1198 or empty CONSTRUCTOR of a larger object. */
1199 if ((DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMSET
1200 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMSET_CHK
)
1201 && integer_zerop (gimple_call_arg (stmt
, 1)))
1202 dse_optimize_redundant_stores (stmt
);
1204 enum dse_store_status store_status
;
1205 bool byte_tracking_enabled
1206 = setup_live_bytes_from_ref (&ref
, live_bytes
);
1207 store_status
= dse_classify_store (&ref
, stmt
,
1208 byte_tracking_enabled
,
1210 if (store_status
== DSE_STORE_LIVE
)
1213 if (store_status
== DSE_STORE_MAYBE_PARTIAL_DEAD
)
1215 maybe_trim_memstar_call (&ref
, live_bytes
, stmt
);
1219 if (store_status
== DSE_STORE_DEAD
)
1220 delete_dead_or_redundant_call (gsi
, "dead");
1224 case BUILT_IN_CALLOC
:
1225 /* We already know the arguments are integer constants. */
1226 dse_optimize_redundant_stores (stmt
);
1234 bool by_clobber_p
= false;
1236 /* Check if this statement stores zero to a memory location,
1237 and if there is a subsequent store of zero to the same
1238 memory location. If so, remove the subsequent store. */
1239 if (gimple_assign_single_p (stmt
)
1240 && initializer_zerop (gimple_assign_rhs1 (stmt
)))
1241 dse_optimize_redundant_stores (stmt
);
1243 /* Self-assignments are zombies. */
1244 if (is_gimple_assign (stmt
)
1245 && operand_equal_p (gimple_assign_rhs1 (stmt
),
1246 gimple_assign_lhs (stmt
), 0))
1250 bool byte_tracking_enabled
1251 = setup_live_bytes_from_ref (&ref
, live_bytes
);
1252 enum dse_store_status store_status
;
1253 store_status
= dse_classify_store (&ref
, stmt
,
1254 byte_tracking_enabled
,
1255 live_bytes
, &by_clobber_p
);
1256 if (store_status
== DSE_STORE_LIVE
)
1259 if (store_status
== DSE_STORE_MAYBE_PARTIAL_DEAD
)
1261 maybe_trim_partially_dead_store (&ref
, live_bytes
, stmt
);
1266 /* Now we know that use_stmt kills the LHS of stmt. */
1268 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1269 another clobber stmt. */
1270 if (gimple_clobber_p (stmt
)
1274 if (is_gimple_call (stmt
)
1275 && (gimple_has_side_effects (stmt
)
1276 || (stmt_could_throw_p (fun
, stmt
)
1277 && !fun
->can_delete_dead_exceptions
)))
1279 /* See if we can remove complete call. */
1280 if (dse_optimize_call (gsi
, live_bytes
))
1282 /* Make sure we do not remove a return slot we cannot reconstruct
1284 if (gimple_call_return_slot_opt_p (as_a
<gcall
*>(stmt
))
1285 && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt
)))
1287 (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt
))))))
1289 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1291 fprintf (dump_file
, " Deleted dead store in call LHS: ");
1292 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
1293 fprintf (dump_file
, "\n");
1295 gimple_call_set_lhs (stmt
, NULL_TREE
);
1299 delete_dead_or_redundant_assignment (gsi
, "dead", need_eh_cleanup
);
1304 const pass_data pass_data_dse
=
1306 GIMPLE_PASS
, /* type */
1308 OPTGROUP_NONE
, /* optinfo_flags */
1309 TV_TREE_DSE
, /* tv_id */
1310 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1311 0, /* properties_provided */
1312 0, /* properties_destroyed */
1313 0, /* todo_flags_start */
1314 0, /* todo_flags_finish */
1317 class pass_dse
: public gimple_opt_pass
1320 pass_dse (gcc::context
*ctxt
)
1321 : gimple_opt_pass (pass_data_dse
, ctxt
)
1324 /* opt_pass methods: */
1325 opt_pass
* clone () { return new pass_dse (m_ctxt
); }
1326 virtual bool gate (function
*) { return flag_tree_dse
!= 0; }
1327 virtual unsigned int execute (function
*);
1329 }; // class pass_dse
1332 pass_dse::execute (function
*fun
)
1335 need_eh_cleanup
= BITMAP_ALLOC (NULL
);
1336 auto_sbitmap
live_bytes (param_dse_max_object_size
);
1338 renumber_gimple_stmt_uids (fun
);
1340 calculate_dominance_info (CDI_DOMINATORS
);
1342 /* Dead store elimination is fundamentally a reverse program order walk. */
1343 int *rpo
= XNEWVEC (int, n_basic_blocks_for_fn (fun
) - NUM_FIXED_BLOCKS
);
1344 int n
= pre_and_rev_post_order_compute_fn (fun
, NULL
, rpo
, false);
1345 for (int i
= n
; i
!= 0; --i
)
1347 basic_block bb
= BASIC_BLOCK_FOR_FN (fun
, rpo
[i
-1]);
1348 gimple_stmt_iterator gsi
;
1350 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);)
1352 gimple
*stmt
= gsi_stmt (gsi
);
1354 if (gimple_vdef (stmt
))
1355 dse_optimize_stmt (fun
, &gsi
, live_bytes
);
1356 else if (def_operand_p
1357 def_p
= single_ssa_def_operand (stmt
, SSA_OP_DEF
))
1359 /* When we remove dead stores make sure to also delete trivially
1361 if (has_zero_uses (DEF_FROM_PTR (def_p
))
1362 && !gimple_has_side_effects (stmt
)
1363 && !is_ctrl_altering_stmt (stmt
)
1364 && (!stmt_could_throw_p (fun
, stmt
)
1365 || fun
->can_delete_dead_exceptions
))
1367 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1369 fprintf (dump_file
, " Deleted trivially dead stmt: ");
1370 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
1371 fprintf (dump_file
, "\n");
1373 if (gsi_remove (&gsi
, true) && need_eh_cleanup
)
1374 bitmap_set_bit (need_eh_cleanup
, bb
->index
);
1375 release_defs (stmt
);
1378 if (gsi_end_p (gsi
))
1379 gsi
= gsi_last_bb (bb
);
1383 bool removed_phi
= false;
1384 for (gphi_iterator si
= gsi_start_phis (bb
); !gsi_end_p (si
);)
1386 gphi
*phi
= si
.phi ();
1387 if (has_zero_uses (gimple_phi_result (phi
)))
1389 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1391 fprintf (dump_file
, " Deleted trivially dead PHI: ");
1392 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
1393 fprintf (dump_file
, "\n");
1395 remove_phi_node (&si
, true);
1401 if (removed_phi
&& gimple_seq_empty_p (phi_nodes (bb
)))
1402 todo
|= TODO_cleanup_cfg
;
1406 /* Removal of stores may make some EH edges dead. Purge such edges from
1407 the CFG as needed. */
1408 if (!bitmap_empty_p (need_eh_cleanup
))
1410 gimple_purge_all_dead_eh_edges (need_eh_cleanup
);
1411 todo
|= TODO_cleanup_cfg
;
1414 BITMAP_FREE (need_eh_cleanup
);
1422 make_pass_dse (gcc::context
*ctxt
)
1424 return new pass_dse (ctxt
);