1 /* Dead and redundant store elimination
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "gimple-iterator.h"
32 #include "tree-cfg.h"
33 #include "tree-dfa.h"
34 #include "tree-cfgcleanup.h"
35 #include "alias.h"
36 #include "tree-ssa-loop.h"
37 #include "tree-ssa-dse.h"
38 #include "builtins.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "tree-eh.h"
42 #include "cfganal.h"
43
44 /* This file implements dead store elimination.
45
46 A dead store is a store into a memory location which will later be
47 overwritten by another store without any intervening loads. In this
48 case the earlier store can be deleted or trimmed if the store
49 was partially dead.
50
51 A redundant store is a store into a memory location which stores
52 the exact same value as a prior store to the same memory location.
53 While this can often be handled by dead store elimination, removing
54 the redundant store is often better than removing or trimming the
55 dead store.
56
57 In our SSA + virtual operand world we use immediate uses of virtual
58 operands to detect these cases. If a store's virtual definition
59 is used precisely once by a later store to the same location which
60 post dominates the first store, then the first store is dead. If
61 the data stored is the same, then the second store is redundant.
62
63 The single use of the store's virtual definition ensures that
64 there are no intervening aliased loads and the requirement that
   65      the second store post dominate the first ensures that if the earlier
66 store executes, then the later stores will execute before the function
67 exits.
68
69 It may help to think of this as first moving the earlier store to
70 the point immediately before the later store. Again, the single
71 use of the virtual definition and the post-dominance relationship
72 ensure that such movement would be safe. Clearly if there are
   73      back-to-back stores, then the second makes the first dead.  If
74 the second store stores the same value, then the second store is
75 redundant.
76
77 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
78 may also help in understanding this code since it discusses the
79 relationship between dead store and redundant load elimination. In
80 fact, they are the same transformation applied to different views of
81 the CFG. */
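
/* As a small illustrative example (not taken from any testcase), in

     void f (int *p)
     {
       *p = 1;
       *p = 2;
     }

   the store of 1 is dead: its virtual definition has a single use, the
   store of 2, which post dominates it and writes the same location, so
   the store of 1 is removed.  If both statements stored the same value,
   the second store would instead be the redundant one and would be the
   statement removed.  */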
82
83 static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
84
85 /* Bitmap of blocks that have had EH statements cleaned. We should
86 remove their dead edges eventually. */
87 static bitmap need_eh_cleanup;
88
89 /* STMT is a statement that may write into memory. Analyze it and
90 initialize WRITE to describe how STMT affects memory.
91
92 Return TRUE if the statement was analyzed, FALSE otherwise.
93
   94      It is always safe to return FALSE.  But typically better optimization
95 can be achieved by analyzing more statements. */
96
97 static bool
98 initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
99 {
100 /* It's advantageous to handle certain mem* functions. */
101 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
102 {
103 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
104 {
105 case BUILT_IN_MEMCPY:
106 case BUILT_IN_MEMMOVE:
107 case BUILT_IN_MEMSET:
108 case BUILT_IN_MEMCPY_CHK:
109 case BUILT_IN_MEMMOVE_CHK:
110 case BUILT_IN_MEMSET_CHK:
111 case BUILT_IN_STRNCPY:
112 case BUILT_IN_STRNCPY_CHK:
113 {
114 tree size = gimple_call_arg (stmt, 2);
115 tree ptr = gimple_call_arg (stmt, 0);
116 ao_ref_init_from_ptr_and_size (write, ptr, size);
117 return true;
118 }
119
120 /* A calloc call can never be dead, but it can make
121 subsequent stores redundant if they store 0 into
122 the same memory locations. */
123 case BUILT_IN_CALLOC:
124 {
125 tree nelem = gimple_call_arg (stmt, 0);
126 tree selem = gimple_call_arg (stmt, 1);
127 tree lhs;
128 if (TREE_CODE (nelem) == INTEGER_CST
129 && TREE_CODE (selem) == INTEGER_CST
130 && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
131 {
132 tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
133 nelem, selem);
134 ao_ref_init_from_ptr_and_size (write, lhs, size);
135 return true;
136 }
137 }
138
139 default:
140 break;
141 }
142 }
143 else if (tree lhs = gimple_get_lhs (stmt))
144 {
145 if (TREE_CODE (lhs) != SSA_NAME)
146 {
147 ao_ref_init (write, lhs);
148 return true;
149 }
150 }
151 return false;
152 }
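
/* For example, for the call memset (p, 0, 32) WRITE is initialized from
   the pointer argument and the size, i.e. it describes the 32 bytes
   starting at *P.  For an assignment such as x.f = 1 it is initialized
   directly from the lvalue X.F.  A calloc call whose arguments are not
   constants, a builtin not listed above, or an assignment whose
   left-hand side is an SSA name is not analyzed and FALSE is
   returned.  */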
153
154 /* Given REF from the alias oracle, return TRUE if it is a valid
155 memory reference for dead store elimination, false otherwise.
156
  157    In particular, the reference must have a known base, a known and
  158    exact size, start at a nonnegative byte-aligned offset and cover a
  159    whole, nonzero number of bytes.  */
160
161 static bool
162 valid_ao_ref_for_dse (ao_ref *ref)
163 {
164 return (ao_ref_base (ref)
165 && known_size_p (ref->max_size)
166 && maybe_ne (ref->size, 0)
167 && known_eq (ref->max_size, ref->size)
168 && known_ge (ref->offset, 0)
169 && multiple_p (ref->offset, BITS_PER_UNIT)
170 && multiple_p (ref->size, BITS_PER_UNIT));
171 }
172
173 /* Try to normalize COPY (an ao_ref) relative to REF. Essentially when we are
  174    done COPY will only refer to bytes found within REF.  Return true if COPY
175 is known to intersect at least one byte of REF. */
176
177 static bool
178 normalize_ref (ao_ref *copy, ao_ref *ref)
179 {
180 if (!ordered_p (copy->offset, ref->offset))
181 return false;
182
183 /* If COPY starts before REF, then reset the beginning of
184 COPY to match REF and decrease the size of COPY by the
185 number of bytes removed from COPY. */
186 if (maybe_lt (copy->offset, ref->offset))
187 {
188 poly_int64 diff = ref->offset - copy->offset;
189 if (maybe_le (copy->size, diff))
190 return false;
191 copy->size -= diff;
192 copy->offset = ref->offset;
193 }
194
195 poly_int64 diff = copy->offset - ref->offset;
196 if (maybe_le (ref->size, diff))
197 return false;
198
199 /* If COPY extends beyond REF, chop off its size appropriately. */
200 poly_int64 limit = ref->size - diff;
201 if (!ordered_p (limit, copy->size))
202 return false;
203
204 if (maybe_gt (copy->size, limit))
205 copy->size = limit;
206 return true;
207 }
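
/* For example, in byte terms (the ao_ref fields are really in bits):
   if REF covers bytes [0, 16) of some base and COPY covers bytes
   [8, 24) of the same base, normalization shrinks COPY to bytes
   [8, 16) and TRUE is returned since at least one byte overlaps.
   Had COPY covered bytes [16, 24), there would be no overlap and
   FALSE would be returned.  */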
208
209 /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
210 address written by STMT must match the one found in REF, which must
211 have its base address previously initialized.
212
213 This routine must be conservative. If we don't know the offset or
214 actual size written, assume nothing was written. */
215
216 static void
217 clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
218 {
219 ao_ref write;
220 if (!initialize_ao_ref_for_dse (stmt, &write))
221 return;
222
223 /* Verify we have the same base memory address, the write
224 has a known size and overlaps with REF. */
225 HOST_WIDE_INT start, size;
226 if (valid_ao_ref_for_dse (&write)
227 && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
228 && known_eq (write.size, write.max_size)
229 && normalize_ref (&write, ref)
230 && (write.offset - ref->offset).is_constant (&start)
231 && write.size.is_constant (&size))
232 bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
233 size / BITS_PER_UNIT);
234 }
235
236 /* REF is a memory write. Extract relevant information from it and
237 initialize the LIVE_BYTES bitmap. If successful, return TRUE.
238 Otherwise return FALSE. */
239
240 static bool
241 setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
242 {
243 HOST_WIDE_INT const_size;
244 if (valid_ao_ref_for_dse (ref)
245 && ref->size.is_constant (&const_size)
246 && (const_size / BITS_PER_UNIT
247 <= param_dse_max_object_size))
248 {
249 bitmap_clear (live_bytes);
250 bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
251 return true;
252 }
253 return false;
254 }
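
/* For example, a valid 16-byte write sets bits 0..15 of LIVE_BYTES and
   TRUE is returned.  A write whose size is unknown, not constant, or
   larger than param_dse_max_object_size bytes is not byte-tracked and
   FALSE is returned.  */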
255
256 /* Compute the number of elements that we can trim from the head and
257 tail of ORIG resulting in a bitmap that is a superset of LIVE.
258
259 Store the number of elements trimmed from the head and tail in
260 TRIM_HEAD and TRIM_TAIL.
261
262 STMT is the statement being trimmed and is used for debugging dump
263 output only. */
264
265 static void
266 compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
267 gimple *stmt)
268 {
269 /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
270 extends through ref->size. So we know that in the original bitmap
271 bits 0..ref->size were true. We don't actually need the bitmap, just
272 the REF to compute the trims. */
273
274 /* Now identify how much, if any of the tail we can chop off. */
275 HOST_WIDE_INT const_size;
276 int last_live = bitmap_last_set_bit (live);
277 if (ref->size.is_constant (&const_size))
278 {
279 int last_orig = (const_size / BITS_PER_UNIT) - 1;
280 /* We can leave inconvenient amounts on the tail as
281 residual handling in mem* and str* functions is usually
282 reasonably efficient. */
283 *trim_tail = last_orig - last_live;
284
285 /* But don't trim away out of bounds accesses, as this defeats
286 proper warnings.
287
288 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
289 where TYPE_SIZE_UNIT is not a constant. */
290 if (*trim_tail
291 && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
292 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
293 && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
294 last_orig) <= 0)
295 *trim_tail = 0;
296 }
297 else
298 *trim_tail = 0;
299
300 /* Identify how much, if any of the head we can chop off. */
301 int first_orig = 0;
302 int first_live = bitmap_first_set_bit (live);
303 *trim_head = first_live - first_orig;
304
305 /* If more than a word remains, then make sure to keep the
306 starting point at least word aligned. */
307 if (last_live - first_live > UNITS_PER_WORD)
308 *trim_head &= ~(UNITS_PER_WORD - 1);
309
310 if ((*trim_head || *trim_tail)
311 && dump_file && (dump_flags & TDF_DETAILS))
312 {
313 fprintf (dump_file, " Trimming statement (head = %d, tail = %d): ",
314 *trim_head, *trim_tail);
315 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
316 fprintf (dump_file, "\n");
317 }
318 }
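
/* For example, if REF describes a 32-byte store and only bytes 8..23 of
   it are still set in LIVE, then *TRIM_HEAD and *TRIM_TAIL both end up
   as 8 (assuming the underlying object is not smaller than the store,
   so the out-of-bounds check above does not zero the tail trim; the
   head trim of 8 is already word aligned).  */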
319
320 /* STMT initializes an object from COMPLEX_CST where one or more of the
321 bytes written may be dead stores. REF is a representation of the
  322    memory written.  LIVE is the bitmap of bytes that are actually live.
323
324 Attempt to rewrite STMT so that only the real or imaginary part of
325 the object is actually stored. */
326
327 static void
328 maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
329 {
330 int trim_head, trim_tail;
331 compute_trims (ref, live, &trim_head, &trim_tail, stmt);
332
333 /* The amount of data trimmed from the head or tail must be at
334 least half the size of the object to ensure we're trimming
335 the entire real or imaginary half. By writing things this
336 way we avoid more O(n) bitmap operations. */
337 if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
338 {
339 /* TREE_REALPART is live */
340 tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
341 tree y = gimple_assign_lhs (stmt);
342 y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
343 gimple_assign_set_lhs (stmt, y);
344 gimple_assign_set_rhs1 (stmt, x);
345 }
346 else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
347 {
348 /* TREE_IMAGPART is live */
349 tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
350 tree y = gimple_assign_lhs (stmt);
351 y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
352 gimple_assign_set_lhs (stmt, y);
353 gimple_assign_set_rhs1 (stmt, x);
354 }
355
356 /* Other cases indicate parts of both the real and imag subobjects
357 are live. We do not try to optimize those cases. */
358 }
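
/* For example, given

     _Complex double c;
     ...
     c = __complex__ (1.0, 2.0);

   where only the first half of C remains live, the tail trim covers at
   least half of the object, so the statement is rewritten to store just
   1.0 through a REALPART_EXPR of C and the imaginary half is not
   written at all.  */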
359
360 /* STMT initializes an object using a CONSTRUCTOR where one or more of the
  361    bytes written are dead stores.  REF describes the memory written by
  362    STMT.  LIVE is the bitmap of bytes that are actually live.
  363 
  364    Attempt to rewrite STMT so that only the live portion of the
  365    object is actually stored.
366
367 The most common case for getting here is a CONSTRUCTOR with no elements
368 being used to zero initialize an object. We do not try to handle other
369 cases as those would force us to fully cover the object with the
370 CONSTRUCTOR node except for the components that are dead. */
371
372 static void
373 maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
374 {
375 tree ctor = gimple_assign_rhs1 (stmt);
376
  377    /* This is the only case we currently handle.  It seems to catch
  378       most cases of actual interest.  */
379 gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);
380
381 int head_trim = 0;
382 int tail_trim = 0;
383 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
384
385 /* Now we want to replace the constructor initializer
386 with memset (object + head_trim, 0, size - head_trim - tail_trim). */
387 if (head_trim || tail_trim)
388 {
389 /* We want &lhs for the MEM_REF expression. */
390 tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
391
392 if (! is_gimple_min_invariant (lhs_addr))
393 return;
394
395 /* The number of bytes for the new constructor. */
396 poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
397 poly_int64 count = ref_bytes - head_trim - tail_trim;
398
399 /* And the new type for the CONSTRUCTOR. Essentially it's just
400 a char array large enough to cover the non-trimmed parts of
401 the original CONSTRUCTOR. Note we want explicit bounds here
402 so that we know how many bytes to clear when expanding the
403 CONSTRUCTOR. */
404 tree type = build_array_type_nelts (char_type_node, count);
405
406 /* Build a suitable alias type rather than using alias set zero
407 to avoid pessimizing. */
408 tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
409
410 /* Build a MEM_REF representing the whole accessed area, starting
411 at the first byte not trimmed. */
412 tree exp = fold_build2 (MEM_REF, type, lhs_addr,
413 build_int_cst (alias_type, head_trim));
414
415 /* Now update STMT with a new RHS and LHS. */
416 gimple_assign_set_lhs (stmt, exp);
417 gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
418 }
419 }
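
/* For example, for a 16-byte aggregate A zero-initialized by A = {}
   where only bytes 8..15 remain live, HEAD_TRIM is 8 and the statement
   is rewritten to store an empty CONSTRUCTOR of type char[8] through a
   MEM_REF eight bytes into A, which later expands like
   memset (&A + 8, 0, 8).  */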
420
421 /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
422 copied/set by DECREMENT. */
423 static void
424 decrement_count (gimple *stmt, int decrement)
425 {
426 tree *countp = gimple_call_arg_ptr (stmt, 2);
427 gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
428 *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
429 - decrement));
430 }
431
432 static void
433 increment_start_addr (gimple *stmt, tree *where, int increment)
434 {
435 if (tree lhs = gimple_call_lhs (stmt))
436 if (where == gimple_call_arg_ptr (stmt, 0))
437 {
438 gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
439 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
440 gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
441 gimple_call_set_lhs (stmt, NULL_TREE);
442 update_stmt (stmt);
443 }
444
445 if (TREE_CODE (*where) == SSA_NAME)
446 {
447 tree tem = make_ssa_name (TREE_TYPE (*where));
448 gassign *newop
449 = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
450 build_int_cst (sizetype, increment));
451 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
452 gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
453 *where = tem;
454 update_stmt (stmt);
455 return;
456 }
457
458 *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
459 *where,
460 build_int_cst (ptr_type_node,
461 increment)));
462 }
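
/* For example, advancing the destination of memset (p_1, 0, 32) by 8
   when P_1 is an SSA name inserts a new statement computing p_1 + 8
   before the call and uses its result as the argument; for an invariant
   address such as &buf the argument is instead folded to the address of
   a MEM_REF eight bytes into BUF.  If the call had an LHS and the
   destination argument is the one being adjusted, the LHS is first
   split out into a separate copy of the original destination so the
   call's return value is preserved.  */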
463
  464 /* STMT is a builtin call whose written bytes are described by REF.  The
  465    bytes not set in LIVE are dead and need not be stored.  Try to rewrite STMT to reduce
466 the amount of data it actually writes.
467
468 Right now we only support trimming from the head or the tail of the
469 memory region. In theory we could split the mem* call, but it's
470 likely of marginal value. */
471
472 static void
473 maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
474 {
475 int head_trim, tail_trim;
476 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
477 {
478 case BUILT_IN_STRNCPY:
479 case BUILT_IN_STRNCPY_CHK:
480 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
481 if (head_trim)
482 {
483 /* Head trimming of strncpy is only possible if we can
484 prove all bytes we would trim are non-zero (or we could
485 turn the strncpy into memset if there must be zero
486 among the head trimmed bytes). If we don't know anything
487 about those bytes, the presence or absence of '\0' bytes
488 in there will affect whether it acts for the non-trimmed
489 bytes as memset or memcpy/strncpy. */
490 c_strlen_data lendata = { };
491 int orig_head_trim = head_trim;
492 tree srcstr = gimple_call_arg (stmt, 1);
493 if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
494 || !tree_fits_uhwi_p (lendata.minlen))
495 head_trim = 0;
496 else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
497 {
498 head_trim = tree_to_uhwi (lendata.minlen);
499 if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
500 head_trim &= ~(UNITS_PER_WORD - 1);
501 }
502 if (orig_head_trim != head_trim
503 && dump_file
504 && (dump_flags & TDF_DETAILS))
505 fprintf (dump_file,
506 " Adjusting strncpy trimming to (head = %d,"
507 " tail = %d)\n", head_trim, tail_trim);
508 }
509 goto do_memcpy;
510
511 case BUILT_IN_MEMCPY:
512 case BUILT_IN_MEMMOVE:
513 case BUILT_IN_MEMCPY_CHK:
514 case BUILT_IN_MEMMOVE_CHK:
515 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
516
517 do_memcpy:
518 /* Tail trimming is easy, we can just reduce the count. */
519 if (tail_trim)
520 decrement_count (stmt, tail_trim);
521
522 /* Head trimming requires adjusting all the arguments. */
523 if (head_trim)
524 {
525 /* For __*_chk need to adjust also the last argument. */
526 if (gimple_call_num_args (stmt) == 4)
527 {
528 tree size = gimple_call_arg (stmt, 3);
529 if (!tree_fits_uhwi_p (size))
530 break;
531 if (!integer_all_onesp (size))
532 {
533 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
534 if (sz < (unsigned) head_trim)
535 break;
536 tree arg = wide_int_to_tree (TREE_TYPE (size),
537 sz - head_trim);
538 gimple_call_set_arg (stmt, 3, arg);
539 }
540 }
541 tree *dst = gimple_call_arg_ptr (stmt, 0);
542 increment_start_addr (stmt, dst, head_trim);
543 tree *src = gimple_call_arg_ptr (stmt, 1);
544 increment_start_addr (stmt, src, head_trim);
545 decrement_count (stmt, head_trim);
546 }
547 break;
548
549 case BUILT_IN_MEMSET:
550 case BUILT_IN_MEMSET_CHK:
551 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
552
553 /* Tail trimming is easy, we can just reduce the count. */
554 if (tail_trim)
555 decrement_count (stmt, tail_trim);
556
557 /* Head trimming requires adjusting all the arguments. */
558 if (head_trim)
559 {
560 /* For __*_chk need to adjust also the last argument. */
561 if (gimple_call_num_args (stmt) == 4)
562 {
563 tree size = gimple_call_arg (stmt, 3);
564 if (!tree_fits_uhwi_p (size))
565 break;
566 if (!integer_all_onesp (size))
567 {
568 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
569 if (sz < (unsigned) head_trim)
570 break;
571 tree arg = wide_int_to_tree (TREE_TYPE (size),
572 sz - head_trim);
573 gimple_call_set_arg (stmt, 3, arg);
574 }
575 }
576 tree *dst = gimple_call_arg_ptr (stmt, 0);
577 increment_start_addr (stmt, dst, head_trim);
578 decrement_count (stmt, head_trim);
579 }
580 break;
581
582 default:
583 break;
584 }
585 }
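
/* For example, if STMT is memset (p, 0, 32) and only bytes 8..23 are
   live, the tail trim first reduces the length to 24 and the head trim
   then advances P by 8 and reduces the length again, leaving
   memset (p + 8, 0, 16).  */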
586
587 /* STMT is a memory write where one or more bytes written are dead
  588    stores.  REF describes the memory written by STMT.  LIVE is the
  589    bitmap of bytes that are actually live.
590
591 Attempt to rewrite STMT so that it writes fewer memory locations. Right
592 now we only support trimming at the start or end of the memory region.
593 It's not clear how much there is to be gained by trimming from the middle
594 of the region. */
595
596 static void
597 maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
598 {
599 if (is_gimple_assign (stmt)
600 && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
601 {
602 switch (gimple_assign_rhs_code (stmt))
603 {
604 case CONSTRUCTOR:
605 maybe_trim_constructor_store (ref, live, stmt);
606 break;
607 case COMPLEX_CST:
608 maybe_trim_complex_store (ref, live, stmt);
609 break;
610 default:
611 break;
612 }
613 }
614 }
615
  616 /* Return TRUE if USE_REF may read one or more bytes that are still set
  617    in LIVE, where LIVE is derived from REF, a write reference.
618
619 While this routine may modify USE_REF, it's passed by value, not
620 location. So callers do not see those modifications. */
621
622 static bool
623 live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
624 {
625 /* We have already verified that USE_REF and REF hit the same object.
626 Now verify that there's actually an overlap between USE_REF and REF. */
627 HOST_WIDE_INT start, size;
628 if (normalize_ref (&use_ref, ref)
629 && (use_ref.offset - ref->offset).is_constant (&start)
630 && use_ref.size.is_constant (&size))
631 {
632 /* If USE_REF covers all of REF, then it will hit one or more
633 live bytes. This avoids useless iteration over the bitmap
634 below. */
635 if (start == 0 && known_eq (size, ref->size))
636 return true;
637
638 /* Now check if any of the remaining bits in use_ref are set in LIVE. */
639 return bitmap_bit_in_range_p (live, start / BITS_PER_UNIT,
640 (start + size - 1) / BITS_PER_UNIT);
641 }
642 return true;
643 }
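
/* For example, if REF describes a 16-byte store of which only bytes
   0..7 are still live and USE_REF reads bytes 8..15 of the same object,
   no live byte is read and FALSE is returned, so the caller may ignore
   that read.  Whenever the overlap cannot be determined the function
   conservatively returns TRUE.  */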
644
645 /* Callback for dse_classify_store calling for_each_index. Verify that
646 indices are invariant in the loop with backedge PHI in basic-block DATA. */
647
648 static bool
649 check_name (tree, tree *idx, void *data)
650 {
651 basic_block phi_bb = (basic_block) data;
652 if (TREE_CODE (*idx) == SSA_NAME
653 && !SSA_NAME_IS_DEFAULT_DEF (*idx)
654 && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
655 phi_bb))
656 return false;
657 return true;
658 }
659
660 /* STMT stores the value 0 into one or more memory locations
661 (via memset, empty constructor, calloc call, etc).
662
663 See if there is a subsequent store of the value 0 to one
664 or more of the same memory location(s). If so, the subsequent
665 store is redundant and can be removed.
666
667 The subsequent stores could be via memset, empty constructors,
668 simple MEM stores, etc. */
669
670 static void
671 dse_optimize_redundant_stores (gimple *stmt)
672 {
673 int cnt = 0;
674
675 /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
676 alias_set_type earlier_set = 0;
677 alias_set_type earlier_base_set = 0;
678 if (is_gimple_assign (stmt))
679 {
680 ao_ref lhs_ref;
681 ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
682 earlier_set = ao_ref_alias_set (&lhs_ref);
683 earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
684 }
685
686 /* We could do something fairly complex and look through PHIs
687 like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
688 the effort.
689
690 Look at all the immediate uses of the VDEF (which are obviously
691 dominated by STMT). See if one or more stores 0 into the same
  692    memory locations as STMT; if so, remove those immediate use statements.
693 tree defvar = gimple_vdef (stmt);
694 imm_use_iterator ui;
695 gimple *use_stmt;
696 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
697 {
698 /* Limit stmt walking. */
699 if (++cnt > param_dse_max_alias_queries_per_store)
700 break;
701
702 /* If USE_STMT stores 0 into one or more of the same locations
703 as STMT and STMT would kill USE_STMT, then we can just remove
704 USE_STMT. */
705 tree fndecl;
706 if ((is_gimple_assign (use_stmt)
707 && gimple_vdef (use_stmt)
708 && (gimple_assign_single_p (use_stmt)
709 && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
710 || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
711 && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
712 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
713 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
714 && integer_zerop (gimple_call_arg (use_stmt, 1))))
715 {
716 ao_ref write;
717
718 if (!initialize_ao_ref_for_dse (use_stmt, &write))
719 break;
720
721 if (valid_ao_ref_for_dse (&write)
722 && stmt_kills_ref_p (stmt, &write))
723 {
724 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
725 if (is_gimple_assign (use_stmt))
726 {
727 ao_ref lhs_ref;
728 ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
729 if ((earlier_set == ao_ref_alias_set (&lhs_ref)
730 || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
731 earlier_set))
732 && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
733 || alias_set_subset_of
734 (ao_ref_base_alias_set (&lhs_ref),
735 earlier_base_set)))
736 delete_dead_or_redundant_assignment (&gsi, "redundant",
737 need_eh_cleanup);
738 }
739 else if (is_gimple_call (use_stmt))
740 {
741 if ((earlier_set == 0
742 || alias_set_subset_of (0, earlier_set))
743 && (earlier_base_set == 0
744 || alias_set_subset_of (0, earlier_base_set)))
745 delete_dead_or_redundant_call (&gsi, "redundant");
746 }
747 else
748 gcc_unreachable ();
749 }
750 }
751 }
752 }
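
/* For example, given

     p = calloc (4, 16);
     memset (p, 0, 16);

   the memset stores zeros into memory the calloc call already zeroed,
   so it is deleted as redundant (a call such as calloc is treated as
   alias-set zero, so the TBAA subset checks above are satisfied).  */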
753
754 /* A helper of dse_optimize_stmt.
  755    Given a statement STMT that writes to REF, classify it according
  756    to downstream uses and defs.  Sets *BY_CLOBBER_P to true
757 if only clobber statements influenced the classification result.
758 Returns the classification. */
759
760 dse_store_status
761 dse_classify_store (ao_ref *ref, gimple *stmt,
762 bool byte_tracking_enabled, sbitmap live_bytes,
763 bool *by_clobber_p, tree stop_at_vuse)
764 {
765 gimple *temp;
766 int cnt = 0;
767 auto_bitmap visited;
768
769 if (by_clobber_p)
770 *by_clobber_p = true;
771
772 /* Find the first dominated statement that clobbers (part of) the
773 memory stmt stores to with no intermediate statement that may use
774 part of the memory stmt stores. That is, find a store that may
775 prove stmt to be a dead store. */
776 temp = stmt;
777 do
778 {
779 gimple *use_stmt;
780 imm_use_iterator ui;
781 bool fail = false;
782 tree defvar;
783
784 if (gimple_code (temp) == GIMPLE_PHI)
785 {
786 /* If we visit this PHI by following a backedge then we have to
787 make sure ref->ref only refers to SSA names that are invariant
788 with respect to the loop represented by this PHI node. */
789 if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
790 gimple_bb (temp))
791 && !for_each_index (ref->ref ? &ref->ref : &ref->base,
792 check_name, gimple_bb (temp)))
793 return DSE_STORE_LIVE;
794 defvar = PHI_RESULT (temp);
795 bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
796 }
797 else
798 defvar = gimple_vdef (temp);
799
800 /* If we're instructed to stop walking at region boundary, do so. */
801 if (defvar == stop_at_vuse)
802 return DSE_STORE_LIVE;
803
804 auto_vec<gimple *, 10> defs;
805 gimple *first_phi_def = NULL;
806 gimple *last_phi_def = NULL;
807 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
808 {
809 /* Limit stmt walking. */
810 if (++cnt > param_dse_max_alias_queries_per_store)
811 {
812 fail = true;
813 break;
814 }
815
816 /* In simple cases we can look through PHI nodes, but we
817 have to be careful with loops and with memory references
818 containing operands that are also operands of PHI nodes.
819 See gcc.c-torture/execute/20051110-*.c. */
820 if (gimple_code (use_stmt) == GIMPLE_PHI)
821 {
822 /* If we already visited this PHI ignore it for further
823 processing. */
824 if (!bitmap_bit_p (visited,
825 SSA_NAME_VERSION (PHI_RESULT (use_stmt))))
826 {
827 defs.safe_push (use_stmt);
828 if (!first_phi_def)
829 first_phi_def = use_stmt;
830 last_phi_def = use_stmt;
831 }
832 }
833 /* If the statement is a use the store is not dead. */
834 else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
835 {
836 /* Handle common cases where we can easily build an ao_ref
837 structure for USE_STMT and in doing so we find that the
838 references hit non-live bytes and thus can be ignored. */
839 if (byte_tracking_enabled
840 && is_gimple_assign (use_stmt))
841 {
842 ao_ref use_ref;
843 ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
844 if (valid_ao_ref_for_dse (&use_ref)
845 && use_ref.base == ref->base
846 && known_eq (use_ref.size, use_ref.max_size)
847 && !live_bytes_read (use_ref, ref, live_bytes))
848 {
849 /* If this is a store, remember it as we possibly
850 need to walk the defs uses. */
851 if (gimple_vdef (use_stmt))
852 defs.safe_push (use_stmt);
853 continue;
854 }
855 }
856
857 fail = true;
858 break;
859 }
860 /* We have visited ourselves already so ignore STMT for the
861 purpose of chaining. */
862 else if (use_stmt == stmt)
863 ;
864 /* If this is a store, remember it as we possibly need to walk the
865 defs uses. */
866 else if (gimple_vdef (use_stmt))
867 defs.safe_push (use_stmt);
868 }
869
870 if (fail)
871 {
872 /* STMT might be partially dead and we may be able to reduce
873 how many memory locations it stores into. */
874 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
875 return DSE_STORE_MAYBE_PARTIAL_DEAD;
876 return DSE_STORE_LIVE;
877 }
878
879 /* If we didn't find any definition this means the store is dead
  880          if it isn't a store to globally reachable memory.  In this case
881 just pretend the stmt makes itself dead. Otherwise fail. */
882 if (defs.is_empty ())
883 {
884 if (ref_may_alias_global_p (ref))
885 return DSE_STORE_LIVE;
886
887 if (by_clobber_p)
888 *by_clobber_p = false;
889 return DSE_STORE_DEAD;
890 }
891
892 /* Process defs and remove those we need not process further. */
893 for (unsigned i = 0; i < defs.length ();)
894 {
895 gimple *def = defs[i];
896 gimple *use_stmt;
897 use_operand_p use_p;
898 tree vdef = (gimple_code (def) == GIMPLE_PHI
899 ? gimple_phi_result (def) : gimple_vdef (def));
900 /* If the path to check starts with a kill we do not need to
901 process it further.
902 ??? With byte tracking we need only kill the bytes currently
903 live. */
904 if (stmt_kills_ref_p (def, ref))
905 {
906 if (by_clobber_p && !gimple_clobber_p (def))
907 *by_clobber_p = false;
908 defs.unordered_remove (i);
909 }
910 /* If the path ends here we do not need to process it further.
911 This for example happens with calls to noreturn functions. */
912 else if (has_zero_uses (vdef))
913 {
914 /* But if the store is to global memory it is definitely
915 not dead. */
916 if (ref_may_alias_global_p (ref))
917 return DSE_STORE_LIVE;
918 defs.unordered_remove (i);
919 }
920 /* In addition to kills we can remove defs whose only use
921 is another def in defs. That can only ever be PHIs of which
922 we track two for simplicity reasons, the first and last in
923 {first,last}_phi_def (we fail for multiple PHIs anyways).
924 We can also ignore defs that feed only into
925 already visited PHIs. */
926 else if (single_imm_use (vdef, &use_p, &use_stmt)
927 && (use_stmt == first_phi_def
928 || use_stmt == last_phi_def
929 || (gimple_code (use_stmt) == GIMPLE_PHI
930 && bitmap_bit_p (visited,
931 SSA_NAME_VERSION
932 (PHI_RESULT (use_stmt))))))
933 defs.unordered_remove (i);
934 else
935 ++i;
936 }
937
938 /* If all defs kill the ref we are done. */
939 if (defs.is_empty ())
940 return DSE_STORE_DEAD;
941 /* If more than one def survives fail. */
942 if (defs.length () > 1)
943 {
944 /* STMT might be partially dead and we may be able to reduce
945 how many memory locations it stores into. */
946 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
947 return DSE_STORE_MAYBE_PARTIAL_DEAD;
948 return DSE_STORE_LIVE;
949 }
950 temp = defs[0];
951
952 /* Track partial kills. */
953 if (byte_tracking_enabled)
954 {
955 clear_bytes_written_by (live_bytes, temp, ref);
956 if (bitmap_empty_p (live_bytes))
957 {
958 if (by_clobber_p && !gimple_clobber_p (temp))
959 *by_clobber_p = false;
960 return DSE_STORE_DEAD;
961 }
962 }
963 }
964 /* Continue walking until there are no more live bytes. */
965 while (1);
966 }
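
/* For example, the store of 1 in

     void f (int *p, int flag)
     {
       *p = 1;
       if (flag)
         *p = 2;
       else
         *p = 3;
     }

   is classified as DSE_STORE_DEAD: both immediate uses of its virtual
   definition are stores that individually kill the reference, so the
   list of defs to follow becomes empty without ever reaching a load.  */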
967
968
969 /* Delete a dead call at GSI, which is mem* call of some kind. */
970 static void
971 delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
972 {
973 gimple *stmt = gsi_stmt (*gsi);
974 if (dump_file && (dump_flags & TDF_DETAILS))
975 {
976 fprintf (dump_file, " Deleted %s call: ", type);
977 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
978 fprintf (dump_file, "\n");
979 }
980
981 tree lhs = gimple_call_lhs (stmt);
982 if (lhs)
983 {
984 tree ptr = gimple_call_arg (stmt, 0);
985 gimple *new_stmt = gimple_build_assign (lhs, ptr);
986 unlink_stmt_vdef (stmt);
987 if (gsi_replace (gsi, new_stmt, true))
988 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
989 }
990 else
991 {
992 /* Then we need to fix the operand of the consuming stmt. */
993 unlink_stmt_vdef (stmt);
994
995 /* Remove the dead store. */
996 if (gsi_remove (gsi, true))
997 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
998 release_defs (stmt);
999 }
1000 }
1001
1002 /* Delete a dead store at GSI, which is a gimple assignment. */
1003
1004 void
1005 delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi, const char *type,
1006 bitmap need_eh_cleanup)
1007 {
1008 gimple *stmt = gsi_stmt (*gsi);
1009 if (dump_file && (dump_flags & TDF_DETAILS))
1010 {
1011 fprintf (dump_file, " Deleted %s store: ", type);
1012 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1013 fprintf (dump_file, "\n");
1014 }
1015
1016 /* Then we need to fix the operand of the consuming stmt. */
1017 unlink_stmt_vdef (stmt);
1018
1019 /* Remove the dead store. */
1020 basic_block bb = gimple_bb (stmt);
1021 if (gsi_remove (gsi, true) && need_eh_cleanup)
1022 bitmap_set_bit (need_eh_cleanup, bb->index);
1023
1024 /* And release any SSA_NAMEs set in this statement back to the
1025 SSA_NAME manager. */
1026 release_defs (stmt);
1027 }
1028
1029 /* Attempt to eliminate dead stores in the statement referenced by BSI.
1030
1031 A dead store is a store into a memory location which will later be
1032 overwritten by another store without any intervening loads. In this
1033 case the earlier store can be deleted.
1034
1035 In our SSA + virtual operand world we use immediate uses of virtual
1036 operands to detect dead stores. If a store's virtual definition
1037 is used precisely once by a later store to the same location which
1038 post dominates the first store, then the first store is dead. */
1039
1040 static void
1041 dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
1042 {
1043 gimple *stmt = gsi_stmt (*gsi);
1044
1045 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1046 if (gimple_has_volatile_ops (stmt)
1047 && (!gimple_clobber_p (stmt)
1048 || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1049 return;
1050
1051 ao_ref ref;
1052 if (!initialize_ao_ref_for_dse (stmt, &ref))
1053 return;
1054
1055 /* We know we have virtual definitions. We can handle assignments and
1056 some builtin calls. */
1057 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1058 {
1059 tree fndecl = gimple_call_fndecl (stmt);
1060 switch (DECL_FUNCTION_CODE (fndecl))
1061 {
1062 case BUILT_IN_MEMCPY:
1063 case BUILT_IN_MEMMOVE:
1064 case BUILT_IN_STRNCPY:
1065 case BUILT_IN_MEMSET:
1066 case BUILT_IN_MEMCPY_CHK:
1067 case BUILT_IN_MEMMOVE_CHK:
1068 case BUILT_IN_STRNCPY_CHK:
1069 case BUILT_IN_MEMSET_CHK:
1070 {
1071 /* Occasionally calls with an explicit length of zero
1072 show up in the IL. It's pointless to do analysis
1073 on them, they're trivially dead. */
1074 tree size = gimple_call_arg (stmt, 2);
1075 if (integer_zerop (size))
1076 {
1077 delete_dead_or_redundant_call (gsi, "dead");
1078 return;
1079 }
1080
1081 /* If this is a memset call that initializes an object
1082 to zero, it may be redundant with an earlier memset
1083 or empty CONSTRUCTOR of a larger object. */
1084 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1085 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1086 && integer_zerop (gimple_call_arg (stmt, 1)))
1087 dse_optimize_redundant_stores (stmt);
1088
1089 enum dse_store_status store_status;
1090 bool byte_tracking_enabled
1091 = setup_live_bytes_from_ref (&ref, live_bytes);
1092 store_status = dse_classify_store (&ref, stmt,
1093 byte_tracking_enabled,
1094 live_bytes);
1095 if (store_status == DSE_STORE_LIVE)
1096 return;
1097
1098 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1099 {
1100 maybe_trim_memstar_call (&ref, live_bytes, stmt);
1101 return;
1102 }
1103
1104 if (store_status == DSE_STORE_DEAD)
1105 delete_dead_or_redundant_call (gsi, "dead");
1106 return;
1107 }
1108
1109 case BUILT_IN_CALLOC:
1110 /* We already know the arguments are integer constants. */
1111 dse_optimize_redundant_stores (stmt);
1112 return;
1113
1114 default:
1115 return;
1116 }
1117 }
1118
1119 bool by_clobber_p = false;
1120
1121 /* Check if this statement stores zero to a memory location,
1122 and if there is a subsequent store of zero to the same
1123 memory location. If so, remove the subsequent store. */
1124 if (gimple_assign_single_p (stmt)
1125 && initializer_zerop (gimple_assign_rhs1 (stmt)))
1126 dse_optimize_redundant_stores (stmt);
1127
1128 /* Self-assignments are zombies. */
1129 if (is_gimple_assign (stmt)
1130 && operand_equal_p (gimple_assign_rhs1 (stmt),
1131 gimple_assign_lhs (stmt), 0))
1132 ;
1133 else
1134 {
1135 bool byte_tracking_enabled
1136 = setup_live_bytes_from_ref (&ref, live_bytes);
1137 enum dse_store_status store_status;
1138 store_status = dse_classify_store (&ref, stmt,
1139 byte_tracking_enabled,
1140 live_bytes, &by_clobber_p);
1141 if (store_status == DSE_STORE_LIVE)
1142 return;
1143
1144 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1145 {
1146 maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
1147 return;
1148 }
1149 }
1150
 1151   /* Now we know that STMT is a dead store.  */
1152
1153 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1154 another clobber stmt. */
1155 if (gimple_clobber_p (stmt)
1156 && !by_clobber_p)
1157 return;
1158
1159 if (is_gimple_call (stmt)
1160 && (gimple_has_side_effects (stmt)
1161 || (stmt_could_throw_p (fun, stmt)
1162 && !fun->can_delete_dead_exceptions)))
1163 {
1164 /* Make sure we do not remove a return slot we cannot reconstruct
1165 later. */
1166 if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
1167 && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
1168 || !poly_int_tree_p
1169 (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
1170 return;
1171 if (dump_file && (dump_flags & TDF_DETAILS))
1172 {
1173 fprintf (dump_file, " Deleted dead store in call LHS: ");
1174 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1175 fprintf (dump_file, "\n");
1176 }
1177 gimple_call_set_lhs (stmt, NULL_TREE);
1178 update_stmt (stmt);
1179 }
1180 else
1181 delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup);
1182 }
1183
1184 namespace {
1185
1186 const pass_data pass_data_dse =
1187 {
1188 GIMPLE_PASS, /* type */
1189 "dse", /* name */
1190 OPTGROUP_NONE, /* optinfo_flags */
1191 TV_TREE_DSE, /* tv_id */
1192 ( PROP_cfg | PROP_ssa ), /* properties_required */
1193 0, /* properties_provided */
1194 0, /* properties_destroyed */
1195 0, /* todo_flags_start */
1196 0, /* todo_flags_finish */
1197 };
1198
1199 class pass_dse : public gimple_opt_pass
1200 {
1201 public:
1202 pass_dse (gcc::context *ctxt)
1203 : gimple_opt_pass (pass_data_dse, ctxt)
1204 {}
1205
1206 /* opt_pass methods: */
1207 opt_pass * clone () { return new pass_dse (m_ctxt); }
1208 virtual bool gate (function *) { return flag_tree_dse != 0; }
1209 virtual unsigned int execute (function *);
1210
1211 }; // class pass_dse
1212
1213 unsigned int
1214 pass_dse::execute (function *fun)
1215 {
1216 unsigned todo = 0;
1217 need_eh_cleanup = BITMAP_ALLOC (NULL);
1218 auto_sbitmap live_bytes (param_dse_max_object_size);
1219
1220 renumber_gimple_stmt_uids (fun);
1221
1222 calculate_dominance_info (CDI_DOMINATORS);
1223
1224 /* Dead store elimination is fundamentally a reverse program order walk. */
1225 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
1226 int n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
1227 for (int i = n; i != 0; --i)
1228 {
1229 basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
1230 gimple_stmt_iterator gsi;
1231
1232 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1233 {
1234 gimple *stmt = gsi_stmt (gsi);
1235
1236 if (gimple_vdef (stmt))
1237 dse_optimize_stmt (fun, &gsi, live_bytes);
1238 else if (def_operand_p
1239 def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
1240 {
1241 /* When we remove dead stores make sure to also delete trivially
1242 dead SSA defs. */
1243 if (has_zero_uses (DEF_FROM_PTR (def_p))
1244 && !gimple_has_side_effects (stmt)
1245 && !is_ctrl_altering_stmt (stmt)
1246 && (!stmt_could_throw_p (fun, stmt)
1247 || fun->can_delete_dead_exceptions))
1248 {
1249 if (dump_file && (dump_flags & TDF_DETAILS))
1250 {
1251 fprintf (dump_file, " Deleted trivially dead stmt: ");
1252 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1253 fprintf (dump_file, "\n");
1254 }
1255 if (gsi_remove (&gsi, true) && need_eh_cleanup)
1256 bitmap_set_bit (need_eh_cleanup, bb->index);
1257 release_defs (stmt);
1258 }
1259 }
1260 if (gsi_end_p (gsi))
1261 gsi = gsi_last_bb (bb);
1262 else
1263 gsi_prev (&gsi);
1264 }
1265 bool removed_phi = false;
1266 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
1267 {
1268 gphi *phi = si.phi ();
1269 if (has_zero_uses (gimple_phi_result (phi)))
1270 {
1271 if (dump_file && (dump_flags & TDF_DETAILS))
1272 {
1273 fprintf (dump_file, " Deleted trivially dead PHI: ");
1274 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1275 fprintf (dump_file, "\n");
1276 }
1277 remove_phi_node (&si, true);
1278 removed_phi = true;
1279 }
1280 else
1281 gsi_next (&si);
1282 }
1283 if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
1284 todo |= TODO_cleanup_cfg;
1285 }
1286 free (rpo);
1287
1288 /* Removal of stores may make some EH edges dead. Purge such edges from
1289 the CFG as needed. */
1290 if (!bitmap_empty_p (need_eh_cleanup))
1291 {
1292 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1293 todo |= TODO_cleanup_cfg;
1294 }
1295
1296 BITMAP_FREE (need_eh_cleanup);
1297
1298 return todo;
1299 }
1300
1301 } // anon namespace
1302
1303 gimple_opt_pass *
1304 make_pass_dse (gcc::context *ctxt)
1305 {
1306 return new pass_dse (ctxt);
1307 }