gcc/tree-ssa-dse.c
1 /* Dead and redundant store elimination
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "gimple-iterator.h"
32 #include "tree-cfg.h"
33 #include "tree-dfa.h"
34 #include "tree-cfgcleanup.h"
35 #include "alias.h"
36 #include "tree-ssa-loop.h"
37 #include "tree-ssa-dse.h"
38 #include "builtins.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "tree-eh.h"
42 #include "cfganal.h"
43 #include "cgraph.h"
44 #include "ipa-modref-tree.h"
45 #include "ipa-modref.h"
46
47 /* This file implements dead store elimination.
48
49 A dead store is a store into a memory location which will later be
50 overwritten by another store without any intervening loads. In this
51 case the earlier store can be deleted or trimmed if the store
52 was partially dead.
53
54 A redundant store is a store into a memory location which stores
55 the exact same value as a prior store to the same memory location.
56 While this can often be handled by dead store elimination, removing
57 the redundant store is often better than removing or trimming the
58 dead store.
59
60 In our SSA + virtual operand world we use immediate uses of virtual
61 operands to detect these cases. If a store's virtual definition
62 is used precisely once by a later store to the same location which
63 post dominates the first store, then the first store is dead. If
64 the data stored is the same, then the second store is redundant.
65
66 The single use of the store's virtual definition ensures that
67 there are no intervening aliased loads and the requirement that
   68 	   the second store post dominate the first ensures that if the earlier
69 store executes, then the later stores will execute before the function
70 exits.
71
72 It may help to think of this as first moving the earlier store to
73 the point immediately before the later store. Again, the single
74 use of the virtual definition and the post-dominance relationship
75 ensure that such movement would be safe. Clearly if there are
   76 	   back to back stores, then the second makes the first dead.  If
77 the second store stores the same value, then the second store is
78 redundant.
79
80 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
81 may also help in understanding this code since it discusses the
82 relationship between dead store and redundant load elimination. In
83 fact, they are the same transformation applied to different views of
84 the CFG. */
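
/* As a small illustration (hypothetical source, not tied to any particular
   testcase):

     void f (int *p, char *q)
     {
       *p = 1;              dead: overwritten below with no
       *p = 2;              intervening read of *p

       memset (q, 0, 16);
       q[0] = 0;            redundant: stores the value the
     }                      memset already stored

   DSE deletes the first store to *p and the store to q[0].  */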
85
86 static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
87
88 /* Bitmap of blocks that have had EH statements cleaned. We should
89 remove their dead edges eventually. */
90 static bitmap need_eh_cleanup;
91
92 /* STMT is a statement that may write into memory. Analyze it and
93 initialize WRITE to describe how STMT affects memory.
94
95 Return TRUE if the statement was analyzed, FALSE otherwise.
96
   97 	   It is always safe to return FALSE.  But typically better optimization
98 can be achieved by analyzing more statements. */
99
100 static bool
101 initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
102 {
103 /* It's advantageous to handle certain mem* functions. */
104 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
105 {
106 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
107 {
108 case BUILT_IN_MEMCPY:
109 case BUILT_IN_MEMMOVE:
110 case BUILT_IN_MEMSET:
111 case BUILT_IN_MEMCPY_CHK:
112 case BUILT_IN_MEMMOVE_CHK:
113 case BUILT_IN_MEMSET_CHK:
114 case BUILT_IN_STRNCPY:
115 case BUILT_IN_STRNCPY_CHK:
116 {
117 tree size = gimple_call_arg (stmt, 2);
118 tree ptr = gimple_call_arg (stmt, 0);
119 ao_ref_init_from_ptr_and_size (write, ptr, size);
120 return true;
121 }
122
123 /* A calloc call can never be dead, but it can make
124 subsequent stores redundant if they store 0 into
125 the same memory locations. */
126 case BUILT_IN_CALLOC:
127 {
128 tree nelem = gimple_call_arg (stmt, 0);
129 tree selem = gimple_call_arg (stmt, 1);
130 tree lhs;
131 if (TREE_CODE (nelem) == INTEGER_CST
132 && TREE_CODE (selem) == INTEGER_CST
133 && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
134 {
135 tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
136 nelem, selem);
137 ao_ref_init_from_ptr_and_size (write, lhs, size);
138 return true;
139 }
140 }
141
142 default:
143 break;
144 }
145 }
146 else if (tree lhs = gimple_get_lhs (stmt))
147 {
148 if (TREE_CODE (lhs) != SSA_NAME)
149 {
150 ao_ref_init (write, lhs);
151 return true;
152 }
153 }
154 return false;
155 }
156
157 /* Given REF from the alias oracle, return TRUE if it is a valid
158 memory reference for dead store elimination, false otherwise.
159
160 In particular, the reference must have a known base, known maximum
161 size, start at a byte offset and have a size that is one or more
162 bytes. */
163
164 static bool
165 valid_ao_ref_for_dse (ao_ref *ref)
166 {
167 return (ao_ref_base (ref)
168 && known_size_p (ref->max_size)
169 && maybe_ne (ref->size, 0)
170 && known_eq (ref->max_size, ref->size)
171 && known_ge (ref->offset, 0)
172 && multiple_p (ref->offset, BITS_PER_UNIT)
173 && multiple_p (ref->size, BITS_PER_UNIT));
174 }
175
176 /* Try to normalize COPY (an ao_ref) relative to REF. Essentially when we are
  177 	   done COPY will only refer to bytes found within REF.  Return true if COPY
178 is known to intersect at least one byte of REF. */
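
/* A worked example (illustrative, using byte units for readability even
   though ao_ref offsets and sizes are in bits): with REF at offset 0 and
   size 16, a COPY at offset -4 with size 12 is normalized to offset 0 and
   size 8, i.e. only the 8 bytes of COPY that fall inside REF remain.  */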
179
180 static bool
181 normalize_ref (ao_ref *copy, ao_ref *ref)
182 {
183 if (!ordered_p (copy->offset, ref->offset))
184 return false;
185
186 /* If COPY starts before REF, then reset the beginning of
187 COPY to match REF and decrease the size of COPY by the
188 number of bytes removed from COPY. */
189 if (maybe_lt (copy->offset, ref->offset))
190 {
191 poly_int64 diff = ref->offset - copy->offset;
192 if (maybe_le (copy->size, diff))
193 return false;
194 copy->size -= diff;
195 copy->offset = ref->offset;
196 }
197
198 poly_int64 diff = copy->offset - ref->offset;
199 if (maybe_le (ref->size, diff))
200 return false;
201
202 /* If COPY extends beyond REF, chop off its size appropriately. */
203 poly_int64 limit = ref->size - diff;
204 if (!ordered_p (limit, copy->size))
205 return false;
206
207 if (maybe_gt (copy->size, limit))
208 copy->size = limit;
209 return true;
210 }
211
  212 	/* Update LIVE_BYTES, which tracks REF, to account for the write WRITE:
  213 	   verify that the two have the same base memory address and that WRITE
  214 	   has a known size and overlaps REF.  */
215 static void
216 clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref write)
217 {
218 HOST_WIDE_INT start, size;
219
220 if (valid_ao_ref_for_dse (&write)
221 && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
222 && known_eq (write.size, write.max_size)
223 /* normalize_ref modifies write and for that reason write is not
224 passed by reference. */
225 && normalize_ref (&write, ref)
226 && (write.offset - ref->offset).is_constant (&start)
227 && write.size.is_constant (&size))
228 bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
229 size / BITS_PER_UNIT);
230 }
231
232 /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
233 address written by STMT must match the one found in REF, which must
234 have its base address previously initialized.
235
236 This routine must be conservative. If we don't know the offset or
237 actual size written, assume nothing was written. */
238
239 static void
240 clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
241 {
242 ao_ref write;
243
244 if (gcall *call = dyn_cast <gcall *> (stmt))
245 {
246 bool interposed;
247 modref_summary *summary = get_modref_function_summary (call, &interposed);
248
249 if (summary && !interposed)
250 for (auto kill : summary->kills)
251 if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
252 clear_live_bytes_for_ref (live_bytes, ref, write);
253 }
254 if (!initialize_ao_ref_for_dse (stmt, &write))
255 return;
256
257 clear_live_bytes_for_ref (live_bytes, ref, write);
258 }
259
260 /* REF is a memory write. Extract relevant information from it and
261 initialize the LIVE_BYTES bitmap. If successful, return TRUE.
262 Otherwise return FALSE. */
263
264 static bool
265 setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
266 {
267 HOST_WIDE_INT const_size;
268 if (valid_ao_ref_for_dse (ref)
269 && ref->size.is_constant (&const_size)
270 && (const_size / BITS_PER_UNIT
271 <= param_dse_max_object_size))
272 {
273 bitmap_clear (live_bytes);
274 bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
275 return true;
276 }
277 return false;
278 }
279
280 /* Compute the number of elements that we can trim from the head and
281 tail of ORIG resulting in a bitmap that is a superset of LIVE.
282
283 Store the number of elements trimmed from the head and tail in
284 TRIM_HEAD and TRIM_TAIL.
285
286 STMT is the statement being trimmed and is used for debugging dump
287 output only. */
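
/* For example (illustrative): if the original store covered bytes 0..15
   of the object and only bytes 4..11 remain live, then *TRIM_HEAD is 4
   and *TRIM_TAIL is 4, subject to the word-alignment and out-of-bounds
   adjustments below.  */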
288
289 static void
290 compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
291 gimple *stmt)
292 {
293 /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
294 extends through ref->size. So we know that in the original bitmap
295 bits 0..ref->size were true. We don't actually need the bitmap, just
296 the REF to compute the trims. */
297
298 /* Now identify how much, if any of the tail we can chop off. */
299 HOST_WIDE_INT const_size;
300 int last_live = bitmap_last_set_bit (live);
301 if (ref->size.is_constant (&const_size))
302 {
303 int last_orig = (const_size / BITS_PER_UNIT) - 1;
304 /* We can leave inconvenient amounts on the tail as
305 residual handling in mem* and str* functions is usually
306 reasonably efficient. */
307 *trim_tail = last_orig - last_live;
308
309 /* But don't trim away out of bounds accesses, as this defeats
310 proper warnings.
311
312 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
313 where TYPE_SIZE_UNIT is not a constant. */
314 if (*trim_tail
315 && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
316 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
317 && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
318 last_orig) <= 0)
319 *trim_tail = 0;
320 }
321 else
322 *trim_tail = 0;
323
324 /* Identify how much, if any of the head we can chop off. */
325 int first_orig = 0;
326 int first_live = bitmap_first_set_bit (live);
327 *trim_head = first_live - first_orig;
328
329 /* If more than a word remains, then make sure to keep the
330 starting point at least word aligned. */
331 if (last_live - first_live > UNITS_PER_WORD)
332 *trim_head &= ~(UNITS_PER_WORD - 1);
333
334 if ((*trim_head || *trim_tail)
335 && dump_file && (dump_flags & TDF_DETAILS))
336 {
337 fprintf (dump_file, " Trimming statement (head = %d, tail = %d): ",
338 *trim_head, *trim_tail);
339 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
340 fprintf (dump_file, "\n");
341 }
342 }
343
344 /* STMT initializes an object from COMPLEX_CST where one or more of the
345 bytes written may be dead stores. REF is a representation of the
  346 	   memory written.  LIVE is the bitmap of bytes that are actually live.
347
348 Attempt to rewrite STMT so that only the real or imaginary part of
349 the object is actually stored. */
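
/* For instance (illustrative): if only the imaginary part of

     c = __complex__ (1.0, 2.0);

   is live, the statement is rewritten to store just that part, roughly

     IMAGPART_EXPR <c> = 2.0;

   in GIMPLE dump form.  */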
350
351 static void
352 maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
353 {
354 int trim_head, trim_tail;
355 compute_trims (ref, live, &trim_head, &trim_tail, stmt);
356
357 /* The amount of data trimmed from the head or tail must be at
358 least half the size of the object to ensure we're trimming
359 the entire real or imaginary half. By writing things this
360 way we avoid more O(n) bitmap operations. */
361 if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
362 {
363 /* TREE_REALPART is live */
364 tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
365 tree y = gimple_assign_lhs (stmt);
366 y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
367 gimple_assign_set_lhs (stmt, y);
368 gimple_assign_set_rhs1 (stmt, x);
369 }
370 else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
371 {
372 /* TREE_IMAGPART is live */
373 tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
374 tree y = gimple_assign_lhs (stmt);
375 y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
376 gimple_assign_set_lhs (stmt, y);
377 gimple_assign_set_rhs1 (stmt, x);
378 }
379
380 /* Other cases indicate parts of both the real and imag subobjects
381 are live. We do not try to optimize those cases. */
382 }
383
384 /* STMT initializes an object using a CONSTRUCTOR where one or more of the
  385 	   bytes written are dead stores.  REF describes the memory written by
  386 	   STMT.  LIVE is the bitmap of bytes that are actually live.
  387 	
  388 	   Attempt to rewrite STMT so that it writes fewer memory locations, by
  389 	   trimming dead bytes from its head and/or tail.
390
391 The most common case for getting here is a CONSTRUCTOR with no elements
392 being used to zero initialize an object. We do not try to handle other
393 cases as those would force us to fully cover the object with the
394 CONSTRUCTOR node except for the components that are dead. */
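
/* Illustrative example: for a 16-byte object

     struct S { char a[16]; } s;
     s = (struct S){};

   where only bytes 8..15 remain live, the zero-initialization is rewritten
   into roughly

     MEM <char[8]> [(struct S *)&s + 8B] = {};

   i.e. an empty CONSTRUCTOR of an 8-byte char array starting at the first
   live byte.  */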
395
396 static void
397 maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
398 {
399 tree ctor = gimple_assign_rhs1 (stmt);
400
  401 	  /* This is the only case we currently handle.  It seems to catch most
  402 	     cases of practical interest.  */
403 gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);
404
405 int head_trim = 0;
406 int tail_trim = 0;
407 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
408
409 /* Now we want to replace the constructor initializer
410 with memset (object + head_trim, 0, size - head_trim - tail_trim). */
411 if (head_trim || tail_trim)
412 {
413 /* We want &lhs for the MEM_REF expression. */
414 tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
415
416 if (! is_gimple_min_invariant (lhs_addr))
417 return;
418
419 /* The number of bytes for the new constructor. */
420 poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
421 poly_int64 count = ref_bytes - head_trim - tail_trim;
422
423 /* And the new type for the CONSTRUCTOR. Essentially it's just
424 a char array large enough to cover the non-trimmed parts of
425 the original CONSTRUCTOR. Note we want explicit bounds here
426 so that we know how many bytes to clear when expanding the
427 CONSTRUCTOR. */
428 tree type = build_array_type_nelts (char_type_node, count);
429
430 /* Build a suitable alias type rather than using alias set zero
431 to avoid pessimizing. */
432 tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
433
434 /* Build a MEM_REF representing the whole accessed area, starting
435 at the first byte not trimmed. */
436 tree exp = fold_build2 (MEM_REF, type, lhs_addr,
437 build_int_cst (alias_type, head_trim));
438
439 /* Now update STMT with a new RHS and LHS. */
440 gimple_assign_set_lhs (stmt, exp);
441 gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
442 }
443 }
444
445 /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
446 copied/set by DECREMENT. */
447 static void
448 decrement_count (gimple *stmt, int decrement)
449 {
450 tree *countp = gimple_call_arg_ptr (stmt, 2);
451 gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
452 *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
453 - decrement));
454 }
455
456 static void
457 increment_start_addr (gimple *stmt, tree *where, int increment)
458 {
459 if (tree lhs = gimple_call_lhs (stmt))
460 if (where == gimple_call_arg_ptr (stmt, 0))
461 {
462 gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
463 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
464 gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
465 gimple_call_set_lhs (stmt, NULL_TREE);
466 update_stmt (stmt);
467 }
468
469 if (TREE_CODE (*where) == SSA_NAME)
470 {
471 tree tem = make_ssa_name (TREE_TYPE (*where));
472 gassign *newop
473 = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
474 build_int_cst (sizetype, increment));
475 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
476 gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
477 *where = tem;
478 update_stmt (stmt);
479 return;
480 }
481
482 *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
483 *where,
484 build_int_cst (ptr_type_node,
485 increment)));
486 }
487
  488 	/* STMT is a builtin call that writes the memory region described by REF;
  489 	   the bytes not set in LIVE are dead and need not be stored.  Try to
  490 	   rewrite STMT so that it reduces the amount of data it actually writes.
491
492 Right now we only support trimming from the head or the tail of the
493 memory region. In theory we could split the mem* call, but it's
494 likely of marginal value. */
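
/* For instance (illustrative): if the first 8 bytes written by

     memset (p, 0, 32);

   are later overwritten before being read, the call is adjusted via
   increment_start_addr and decrement_count below into the equivalent of

     memset (p + 8, 0, 24);  */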
495
496 static void
497 maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
498 {
499 int head_trim, tail_trim;
500 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
501 {
502 case BUILT_IN_STRNCPY:
503 case BUILT_IN_STRNCPY_CHK:
504 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
505 if (head_trim)
506 {
507 /* Head trimming of strncpy is only possible if we can
508 prove all bytes we would trim are non-zero (or we could
509 turn the strncpy into memset if there must be zero
510 among the head trimmed bytes). If we don't know anything
511 about those bytes, the presence or absence of '\0' bytes
512 in there will affect whether it acts for the non-trimmed
513 bytes as memset or memcpy/strncpy. */
514 c_strlen_data lendata = { };
515 int orig_head_trim = head_trim;
516 tree srcstr = gimple_call_arg (stmt, 1);
517 if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
518 || !tree_fits_uhwi_p (lendata.minlen))
519 head_trim = 0;
520 else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
521 {
522 head_trim = tree_to_uhwi (lendata.minlen);
523 if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
524 head_trim &= ~(UNITS_PER_WORD - 1);
525 }
526 if (orig_head_trim != head_trim
527 && dump_file
528 && (dump_flags & TDF_DETAILS))
529 fprintf (dump_file,
530 " Adjusting strncpy trimming to (head = %d,"
531 " tail = %d)\n", head_trim, tail_trim);
532 }
533 goto do_memcpy;
534
535 case BUILT_IN_MEMCPY:
536 case BUILT_IN_MEMMOVE:
537 case BUILT_IN_MEMCPY_CHK:
538 case BUILT_IN_MEMMOVE_CHK:
539 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
540
541 do_memcpy:
542 /* Tail trimming is easy, we can just reduce the count. */
543 if (tail_trim)
544 decrement_count (stmt, tail_trim);
545
546 /* Head trimming requires adjusting all the arguments. */
547 if (head_trim)
548 {
549 /* For __*_chk need to adjust also the last argument. */
550 if (gimple_call_num_args (stmt) == 4)
551 {
552 tree size = gimple_call_arg (stmt, 3);
553 if (!tree_fits_uhwi_p (size))
554 break;
555 if (!integer_all_onesp (size))
556 {
557 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
558 if (sz < (unsigned) head_trim)
559 break;
560 tree arg = wide_int_to_tree (TREE_TYPE (size),
561 sz - head_trim);
562 gimple_call_set_arg (stmt, 3, arg);
563 }
564 }
565 tree *dst = gimple_call_arg_ptr (stmt, 0);
566 increment_start_addr (stmt, dst, head_trim);
567 tree *src = gimple_call_arg_ptr (stmt, 1);
568 increment_start_addr (stmt, src, head_trim);
569 decrement_count (stmt, head_trim);
570 }
571 break;
572
573 case BUILT_IN_MEMSET:
574 case BUILT_IN_MEMSET_CHK:
575 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
576
577 /* Tail trimming is easy, we can just reduce the count. */
578 if (tail_trim)
579 decrement_count (stmt, tail_trim);
580
581 /* Head trimming requires adjusting all the arguments. */
582 if (head_trim)
583 {
584 /* For __*_chk need to adjust also the last argument. */
585 if (gimple_call_num_args (stmt) == 4)
586 {
587 tree size = gimple_call_arg (stmt, 3);
588 if (!tree_fits_uhwi_p (size))
589 break;
590 if (!integer_all_onesp (size))
591 {
592 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
593 if (sz < (unsigned) head_trim)
594 break;
595 tree arg = wide_int_to_tree (TREE_TYPE (size),
596 sz - head_trim);
597 gimple_call_set_arg (stmt, 3, arg);
598 }
599 }
600 tree *dst = gimple_call_arg_ptr (stmt, 0);
601 increment_start_addr (stmt, dst, head_trim);
602 decrement_count (stmt, head_trim);
603 }
604 break;
605
606 default:
607 break;
608 }
609 }
610
611 /* STMT is a memory write where one or more bytes written are dead
  612 	   stores.  REF describes the memory written by STMT.  LIVE is the
  613 	   bitmap of bytes that are actually live.
614
615 Attempt to rewrite STMT so that it writes fewer memory locations. Right
616 now we only support trimming at the start or end of the memory region.
617 It's not clear how much there is to be gained by trimming from the middle
618 of the region. */
619
620 static void
621 maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
622 {
623 if (is_gimple_assign (stmt)
624 && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
625 {
626 switch (gimple_assign_rhs_code (stmt))
627 {
628 case CONSTRUCTOR:
629 maybe_trim_constructor_store (ref, live, stmt);
630 break;
631 case COMPLEX_CST:
632 maybe_trim_complex_store (ref, live, stmt);
633 break;
634 default:
635 break;
636 }
637 }
638 }
639
  640 	/* Return TRUE if USE_REF reads one or more bytes that are live in LIVE,
  641 	   where LIVE is derived from REF, a write reference.
642
643 While this routine may modify USE_REF, it's passed by value, not
644 location. So callers do not see those modifications. */
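
/* For example (illustrative): if REF covers bytes 0..15 of an object,
   only bytes 0..7 are still live and USE_REF reads bytes 8..15, then no
   live byte is read and this function returns FALSE, allowing the caller
   to ignore that use.  */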
645
646 static bool
647 live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
648 {
649 /* We have already verified that USE_REF and REF hit the same object.
650 Now verify that there's actually an overlap between USE_REF and REF. */
651 HOST_WIDE_INT start, size;
652 if (normalize_ref (&use_ref, ref)
653 && (use_ref.offset - ref->offset).is_constant (&start)
654 && use_ref.size.is_constant (&size))
655 {
656 /* If USE_REF covers all of REF, then it will hit one or more
657 live bytes. This avoids useless iteration over the bitmap
658 below. */
659 if (start == 0 && known_eq (size, ref->size))
660 return true;
661
662 /* Now check if any of the remaining bits in use_ref are set in LIVE. */
663 return bitmap_bit_in_range_p (live, start / BITS_PER_UNIT,
664 (start + size - 1) / BITS_PER_UNIT);
665 }
666 return true;
667 }
668
669 /* Callback for dse_classify_store calling for_each_index. Verify that
670 indices are invariant in the loop with backedge PHI in basic-block DATA. */
671
672 static bool
673 check_name (tree, tree *idx, void *data)
674 {
675 basic_block phi_bb = (basic_block) data;
676 if (TREE_CODE (*idx) == SSA_NAME
677 && !SSA_NAME_IS_DEFAULT_DEF (*idx)
678 && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
679 phi_bb))
680 return false;
681 return true;
682 }
683
684 /* STMT stores the value 0 into one or more memory locations
685 (via memset, empty constructor, calloc call, etc).
686
687 See if there is a subsequent store of the value 0 to one
688 or more of the same memory location(s). If so, the subsequent
689 store is redundant and can be removed.
690
691 The subsequent stores could be via memset, empty constructors,
692 simple MEM stores, etc. */
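
/* For instance (illustrative):

     p = calloc (16, sizeof (int));
     memset (p, 0, 16);

   the memset writes zeros into memory that the calloc has already zeroed
   and is killed by it, so the memset is deleted as redundant.  */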
693
694 static void
695 dse_optimize_redundant_stores (gimple *stmt)
696 {
697 int cnt = 0;
698
699 /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
700 alias_set_type earlier_set = 0;
701 alias_set_type earlier_base_set = 0;
702 if (is_gimple_assign (stmt))
703 {
704 ao_ref lhs_ref;
705 ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
706 earlier_set = ao_ref_alias_set (&lhs_ref);
707 earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
708 }
709
710 /* We could do something fairly complex and look through PHIs
711 like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
712 the effort.
713
714 Look at all the immediate uses of the VDEF (which are obviously
  715 	   dominated by STMT).  See if one or more of them stores 0 into the same
  716 	   memory locations as STMT; if so, remove those immediate-use statements.  */
717 tree defvar = gimple_vdef (stmt);
718 imm_use_iterator ui;
719 gimple *use_stmt;
720 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
721 {
722 /* Limit stmt walking. */
723 if (++cnt > param_dse_max_alias_queries_per_store)
724 break;
725
726 /* If USE_STMT stores 0 into one or more of the same locations
727 as STMT and STMT would kill USE_STMT, then we can just remove
728 USE_STMT. */
729 tree fndecl;
730 if ((is_gimple_assign (use_stmt)
731 && gimple_vdef (use_stmt)
732 && (gimple_assign_single_p (use_stmt)
733 && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
734 || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
735 && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
736 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
737 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
738 && integer_zerop (gimple_call_arg (use_stmt, 1))))
739 {
740 ao_ref write;
741
742 if (!initialize_ao_ref_for_dse (use_stmt, &write))
743 break;
744
745 if (valid_ao_ref_for_dse (&write)
746 && stmt_kills_ref_p (stmt, &write))
747 {
748 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
749 if (is_gimple_assign (use_stmt))
750 {
751 ao_ref lhs_ref;
752 ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
753 if ((earlier_set == ao_ref_alias_set (&lhs_ref)
754 || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
755 earlier_set))
756 && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
757 || alias_set_subset_of
758 (ao_ref_base_alias_set (&lhs_ref),
759 earlier_base_set)))
760 delete_dead_or_redundant_assignment (&gsi, "redundant",
761 need_eh_cleanup);
762 }
763 else if (is_gimple_call (use_stmt))
764 {
765 if ((earlier_set == 0
766 || alias_set_subset_of (0, earlier_set))
767 && (earlier_base_set == 0
768 || alias_set_subset_of (0, earlier_base_set)))
769 delete_dead_or_redundant_call (&gsi, "redundant");
770 }
771 else
772 gcc_unreachable ();
773 }
774 }
775 }
776 }
777
778 /* A helper of dse_optimize_stmt.
779 Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
780 according to downstream uses and defs. Sets *BY_CLOBBER_P to true
781 if only clobber statements influenced the classification result.
782 Returns the classification. */
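
/* For example (illustrative):

     *p = 1;
     if (cond)
       *p = 2;
     else
       *p = 3;

   both immediate uses of the first store's virtual definition are stores
   that kill it, so the first store is classified as DSE_STORE_DEAD.  */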
783
784 dse_store_status
785 dse_classify_store (ao_ref *ref, gimple *stmt,
786 bool byte_tracking_enabled, sbitmap live_bytes,
787 bool *by_clobber_p, tree stop_at_vuse)
788 {
789 gimple *temp;
790 int cnt = 0;
791 auto_bitmap visited;
792
793 if (by_clobber_p)
794 *by_clobber_p = true;
795
796 /* Find the first dominated statement that clobbers (part of) the
797 memory stmt stores to with no intermediate statement that may use
798 part of the memory stmt stores. That is, find a store that may
799 prove stmt to be a dead store. */
800 temp = stmt;
801 do
802 {
803 gimple *use_stmt;
804 imm_use_iterator ui;
805 bool fail = false;
806 tree defvar;
807
808 if (gimple_code (temp) == GIMPLE_PHI)
809 {
810 /* If we visit this PHI by following a backedge then we have to
811 make sure ref->ref only refers to SSA names that are invariant
812 with respect to the loop represented by this PHI node. */
813 if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
814 gimple_bb (temp))
815 && !for_each_index (ref->ref ? &ref->ref : &ref->base,
816 check_name, gimple_bb (temp)))
817 return DSE_STORE_LIVE;
818 defvar = PHI_RESULT (temp);
819 bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
820 }
821 else
822 defvar = gimple_vdef (temp);
823
824 /* If we're instructed to stop walking at region boundary, do so. */
825 if (defvar == stop_at_vuse)
826 return DSE_STORE_LIVE;
827
828 auto_vec<gimple *, 10> defs;
829 gimple *first_phi_def = NULL;
830 gimple *last_phi_def = NULL;
831 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
832 {
833 /* Limit stmt walking. */
834 if (++cnt > param_dse_max_alias_queries_per_store)
835 {
836 fail = true;
837 break;
838 }
839
840 /* In simple cases we can look through PHI nodes, but we
841 have to be careful with loops and with memory references
842 containing operands that are also operands of PHI nodes.
843 See gcc.c-torture/execute/20051110-*.c. */
844 if (gimple_code (use_stmt) == GIMPLE_PHI)
845 {
846 /* If we already visited this PHI ignore it for further
847 processing. */
848 if (!bitmap_bit_p (visited,
849 SSA_NAME_VERSION (PHI_RESULT (use_stmt))))
850 {
851 defs.safe_push (use_stmt);
852 if (!first_phi_def)
853 first_phi_def = use_stmt;
854 last_phi_def = use_stmt;
855 }
856 }
857 /* If the statement is a use the store is not dead. */
858 else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
859 {
860 /* Handle common cases where we can easily build an ao_ref
861 structure for USE_STMT and in doing so we find that the
862 references hit non-live bytes and thus can be ignored. */
863 if (byte_tracking_enabled
864 && is_gimple_assign (use_stmt))
865 {
866 ao_ref use_ref;
867 ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
868 if (valid_ao_ref_for_dse (&use_ref)
869 && use_ref.base == ref->base
870 && known_eq (use_ref.size, use_ref.max_size)
871 && !live_bytes_read (use_ref, ref, live_bytes))
872 {
873 /* If this is a store, remember it as we possibly
874 need to walk the defs uses. */
875 if (gimple_vdef (use_stmt))
876 defs.safe_push (use_stmt);
877 continue;
878 }
879 }
880
881 fail = true;
882 break;
883 }
884 /* We have visited ourselves already so ignore STMT for the
885 purpose of chaining. */
886 else if (use_stmt == stmt)
887 ;
888 /* If this is a store, remember it as we possibly need to walk the
889 defs uses. */
890 else if (gimple_vdef (use_stmt))
891 defs.safe_push (use_stmt);
892 }
893
894 if (fail)
895 {
896 /* STMT might be partially dead and we may be able to reduce
897 how many memory locations it stores into. */
898 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
899 return DSE_STORE_MAYBE_PARTIAL_DEAD;
900 return DSE_STORE_LIVE;
901 }
902
903 /* If we didn't find any definition this means the store is dead
  904 	     if it isn't a store to globally reachable memory.  In this case
905 just pretend the stmt makes itself dead. Otherwise fail. */
906 if (defs.is_empty ())
907 {
908 if (ref_may_alias_global_p (ref))
909 return DSE_STORE_LIVE;
910
911 if (by_clobber_p)
912 *by_clobber_p = false;
913 return DSE_STORE_DEAD;
914 }
915
916 /* Process defs and remove those we need not process further. */
917 for (unsigned i = 0; i < defs.length ();)
918 {
919 gimple *def = defs[i];
920 gimple *use_stmt;
921 use_operand_p use_p;
922 tree vdef = (gimple_code (def) == GIMPLE_PHI
923 ? gimple_phi_result (def) : gimple_vdef (def));
924 /* If the path to check starts with a kill we do not need to
925 process it further.
926 ??? With byte tracking we need only kill the bytes currently
927 live. */
928 if (stmt_kills_ref_p (def, ref))
929 {
930 if (by_clobber_p && !gimple_clobber_p (def))
931 *by_clobber_p = false;
932 defs.unordered_remove (i);
933 }
934 /* If the path ends here we do not need to process it further.
935 This for example happens with calls to noreturn functions. */
936 else if (has_zero_uses (vdef))
937 {
938 /* But if the store is to global memory it is definitely
939 not dead. */
940 if (ref_may_alias_global_p (ref))
941 return DSE_STORE_LIVE;
942 defs.unordered_remove (i);
943 }
944 /* In addition to kills we can remove defs whose only use
945 is another def in defs. That can only ever be PHIs of which
946 we track two for simplicity reasons, the first and last in
947 {first,last}_phi_def (we fail for multiple PHIs anyways).
948 We can also ignore defs that feed only into
949 already visited PHIs. */
950 else if (single_imm_use (vdef, &use_p, &use_stmt)
951 && (use_stmt == first_phi_def
952 || use_stmt == last_phi_def
953 || (gimple_code (use_stmt) == GIMPLE_PHI
954 && bitmap_bit_p (visited,
955 SSA_NAME_VERSION
956 (PHI_RESULT (use_stmt))))))
957 defs.unordered_remove (i);
958 else
959 ++i;
960 }
961
962 /* If all defs kill the ref we are done. */
963 if (defs.is_empty ())
964 return DSE_STORE_DEAD;
965 /* If more than one def survives fail. */
966 if (defs.length () > 1)
967 {
968 /* STMT might be partially dead and we may be able to reduce
969 how many memory locations it stores into. */
970 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
971 return DSE_STORE_MAYBE_PARTIAL_DEAD;
972 return DSE_STORE_LIVE;
973 }
974 temp = defs[0];
975
976 /* Track partial kills. */
977 if (byte_tracking_enabled)
978 {
979 clear_bytes_written_by (live_bytes, temp, ref);
980 if (bitmap_empty_p (live_bytes))
981 {
982 if (by_clobber_p && !gimple_clobber_p (temp))
983 *by_clobber_p = false;
984 return DSE_STORE_DEAD;
985 }
986 }
987 }
988 /* Continue walking until there are no more live bytes. */
989 while (1);
990 }
991
992
993 /* Delete a dead call at GSI, which is mem* call of some kind. */
994 static void
995 delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
996 {
997 gimple *stmt = gsi_stmt (*gsi);
998 if (dump_file && (dump_flags & TDF_DETAILS))
999 {
1000 fprintf (dump_file, " Deleted %s call: ", type);
1001 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1002 fprintf (dump_file, "\n");
1003 }
1004
1005 basic_block bb = gimple_bb (stmt);
1006 tree lhs = gimple_call_lhs (stmt);
1007 if (lhs)
1008 {
1009 tree ptr = gimple_call_arg (stmt, 0);
1010 gimple *new_stmt = gimple_build_assign (lhs, ptr);
1011 unlink_stmt_vdef (stmt);
1012 if (gsi_replace (gsi, new_stmt, true))
1013 bitmap_set_bit (need_eh_cleanup, bb->index);
1014 }
1015 else
1016 {
1017 /* Then we need to fix the operand of the consuming stmt. */
1018 unlink_stmt_vdef (stmt);
1019
1020 /* Remove the dead store. */
1021 if (gsi_remove (gsi, true))
1022 bitmap_set_bit (need_eh_cleanup, bb->index);
1023 release_defs (stmt);
1024 }
1025 }
1026
1027 /* Delete a dead store at GSI, which is a gimple assignment. */
1028
1029 void
1030 delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi, const char *type,
1031 bitmap need_eh_cleanup)
1032 {
1033 gimple *stmt = gsi_stmt (*gsi);
1034 if (dump_file && (dump_flags & TDF_DETAILS))
1035 {
1036 fprintf (dump_file, " Deleted %s store: ", type);
1037 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1038 fprintf (dump_file, "\n");
1039 }
1040
1041 /* Then we need to fix the operand of the consuming stmt. */
1042 unlink_stmt_vdef (stmt);
1043
1044 /* Remove the dead store. */
1045 basic_block bb = gimple_bb (stmt);
1046 if (gsi_remove (gsi, true) && need_eh_cleanup)
1047 bitmap_set_bit (need_eh_cleanup, bb->index);
1048
1049 /* And release any SSA_NAMEs set in this statement back to the
1050 SSA_NAME manager. */
1051 release_defs (stmt);
1052 }
1053
 1054 	/* Try to prove, using the modref summary, that all memory written to by a
 1055 	   call is dead, and if so remove the call.  Assume that if the return value
 1056 	   is written to memory, that store has already been proved dead.  */
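
/* For instance (illustrative, with a hypothetical function foo): a call

     foo (&local);

   whose modref summary shows it only writes through its first argument can
   be removed if its return value is unused, it cannot throw (or dead
   exceptions may be deleted), and dse_classify_store proves every byte it
   writes dead, e.g. because LOCAL is a local variable that is clobbered
   without being read again.  */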
1057
1058 static bool
1059 dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
1060 {
1061 gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1062
1063 if (!stmt)
1064 return false;
1065
1066 tree callee = gimple_call_fndecl (stmt);
1067
1068 if (!callee)
1069 return false;
1070
1071 /* Pure/const functions are optimized by normal DCE
 1072 	     or handled as a store above.  */
1073 int flags = gimple_call_flags (stmt);
1074 if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
1075 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
1076 return false;
1077
1078 cgraph_node *node = cgraph_node::get (callee);
1079 if (!node)
1080 return false;
1081
1082 if (stmt_could_throw_p (cfun, stmt)
1083 && !cfun->can_delete_dead_exceptions)
1084 return false;
1085
1086 /* If return value is used the call is not dead. */
1087 tree lhs = gimple_call_lhs (stmt);
1088 if (lhs && TREE_CODE (lhs) == SSA_NAME)
1089 {
1090 imm_use_iterator ui;
1091 gimple *use_stmt;
1092 FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
1093 if (!is_gimple_debug (use_stmt))
1094 return false;
1095 }
1096
1097 /* Verify that there are no side-effects except for return value
1098 and memory writes tracked by modref. */
1099 modref_summary *summary = get_modref_function_summary (node);
1100 if (!summary || !summary->try_dse)
1101 return false;
1102
1103 bool by_clobber_p = false;
1104
1105 /* Walk all memory writes and verify that they are dead. */
1106 for (auto base_node : summary->stores->bases)
1107 for (auto ref_node : base_node->refs)
1108 for (auto access_node : ref_node->accesses)
1109 {
1110 tree arg = access_node.get_call_arg (stmt);
1111
1112 if (!arg)
1113 return false;
1114
1115 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
1116 continue;
1117
1118 ao_ref ref;
1119
1120 if (!access_node.get_ao_ref (stmt, &ref))
1121 return false;
1122 ref.ref_alias_set = ref_node->ref;
1123 ref.base_alias_set = base_node->base;
1124
1125 bool byte_tracking_enabled
1126 = setup_live_bytes_from_ref (&ref, live_bytes);
1127 enum dse_store_status store_status;
1128
1129 store_status = dse_classify_store (&ref, stmt,
1130 byte_tracking_enabled,
1131 live_bytes, &by_clobber_p);
1132 if (store_status != DSE_STORE_DEAD)
1133 return false;
1134 }
1135 delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup);
1136 return true;
1137 }
1138
1139 /* Attempt to eliminate dead stores in the statement referenced by BSI.
1140
1141 A dead store is a store into a memory location which will later be
1142 overwritten by another store without any intervening loads. In this
1143 case the earlier store can be deleted.
1144
1145 In our SSA + virtual operand world we use immediate uses of virtual
1146 operands to detect dead stores. If a store's virtual definition
1147 is used precisely once by a later store to the same location which
1148 post dominates the first store, then the first store is dead. */
1149
1150 static void
1151 dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
1152 {
1153 gimple *stmt = gsi_stmt (*gsi);
1154
1155 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1156 if (gimple_has_volatile_ops (stmt)
1157 && (!gimple_clobber_p (stmt)
1158 || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1159 return;
1160
1161 ao_ref ref;
1162 /* If this is not a store we can still remove dead call using
1163 modref summary. */
1164 if (!initialize_ao_ref_for_dse (stmt, &ref))
1165 {
1166 dse_optimize_call (gsi, live_bytes);
1167 return;
1168 }
1169
1170 /* We know we have virtual definitions. We can handle assignments and
1171 some builtin calls. */
1172 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1173 {
1174 tree fndecl = gimple_call_fndecl (stmt);
1175 switch (DECL_FUNCTION_CODE (fndecl))
1176 {
1177 case BUILT_IN_MEMCPY:
1178 case BUILT_IN_MEMMOVE:
1179 case BUILT_IN_STRNCPY:
1180 case BUILT_IN_MEMSET:
1181 case BUILT_IN_MEMCPY_CHK:
1182 case BUILT_IN_MEMMOVE_CHK:
1183 case BUILT_IN_STRNCPY_CHK:
1184 case BUILT_IN_MEMSET_CHK:
1185 {
1186 /* Occasionally calls with an explicit length of zero
1187 show up in the IL. It's pointless to do analysis
 1188 	       on them; they're trivially dead.  */
1189 tree size = gimple_call_arg (stmt, 2);
1190 if (integer_zerop (size))
1191 {
1192 delete_dead_or_redundant_call (gsi, "dead");
1193 return;
1194 }
1195
1196 /* If this is a memset call that initializes an object
1197 to zero, it may be redundant with an earlier memset
1198 or empty CONSTRUCTOR of a larger object. */
1199 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1200 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1201 && integer_zerop (gimple_call_arg (stmt, 1)))
1202 dse_optimize_redundant_stores (stmt);
1203
1204 enum dse_store_status store_status;
1205 bool byte_tracking_enabled
1206 = setup_live_bytes_from_ref (&ref, live_bytes);
1207 store_status = dse_classify_store (&ref, stmt,
1208 byte_tracking_enabled,
1209 live_bytes);
1210 if (store_status == DSE_STORE_LIVE)
1211 return;
1212
1213 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1214 {
1215 maybe_trim_memstar_call (&ref, live_bytes, stmt);
1216 return;
1217 }
1218
1219 if (store_status == DSE_STORE_DEAD)
1220 delete_dead_or_redundant_call (gsi, "dead");
1221 return;
1222 }
1223
1224 case BUILT_IN_CALLOC:
1225 /* We already know the arguments are integer constants. */
1226 dse_optimize_redundant_stores (stmt);
1227 return;
1228
1229 default:
1230 return;
1231 }
1232 }
1233
1234 bool by_clobber_p = false;
1235
1236 /* Check if this statement stores zero to a memory location,
1237 and if there is a subsequent store of zero to the same
1238 memory location. If so, remove the subsequent store. */
1239 if (gimple_assign_single_p (stmt)
1240 && initializer_zerop (gimple_assign_rhs1 (stmt)))
1241 dse_optimize_redundant_stores (stmt);
1242
1243 /* Self-assignments are zombies. */
1244 if (is_gimple_assign (stmt)
1245 && operand_equal_p (gimple_assign_rhs1 (stmt),
1246 gimple_assign_lhs (stmt), 0))
1247 ;
1248 else
1249 {
1250 bool byte_tracking_enabled
1251 = setup_live_bytes_from_ref (&ref, live_bytes);
1252 enum dse_store_status store_status;
1253 store_status = dse_classify_store (&ref, stmt,
1254 byte_tracking_enabled,
1255 live_bytes, &by_clobber_p);
1256 if (store_status == DSE_STORE_LIVE)
1257 return;
1258
1259 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1260 {
1261 maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
1262 return;
1263 }
1264 }
1265
1266 /* Now we know that use_stmt kills the LHS of stmt. */
1267
1268 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1269 another clobber stmt. */
1270 if (gimple_clobber_p (stmt)
1271 && !by_clobber_p)
1272 return;
1273
1274 if (is_gimple_call (stmt)
1275 && (gimple_has_side_effects (stmt)
1276 || (stmt_could_throw_p (fun, stmt)
1277 && !fun->can_delete_dead_exceptions)))
1278 {
1279 /* See if we can remove complete call. */
1280 if (dse_optimize_call (gsi, live_bytes))
1281 return;
1282 /* Make sure we do not remove a return slot we cannot reconstruct
1283 later. */
1284 if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
1285 && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
1286 || !poly_int_tree_p
1287 (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
1288 return;
1289 if (dump_file && (dump_flags & TDF_DETAILS))
1290 {
1291 fprintf (dump_file, " Deleted dead store in call LHS: ");
1292 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1293 fprintf (dump_file, "\n");
1294 }
1295 gimple_call_set_lhs (stmt, NULL_TREE);
1296 update_stmt (stmt);
1297 }
1298 else
1299 delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup);
1300 }
1301
1302 namespace {
1303
1304 const pass_data pass_data_dse =
1305 {
1306 GIMPLE_PASS, /* type */
1307 "dse", /* name */
1308 OPTGROUP_NONE, /* optinfo_flags */
1309 TV_TREE_DSE, /* tv_id */
1310 ( PROP_cfg | PROP_ssa ), /* properties_required */
1311 0, /* properties_provided */
1312 0, /* properties_destroyed */
1313 0, /* todo_flags_start */
1314 0, /* todo_flags_finish */
1315 };
1316
1317 class pass_dse : public gimple_opt_pass
1318 {
1319 public:
1320 pass_dse (gcc::context *ctxt)
1321 : gimple_opt_pass (pass_data_dse, ctxt)
1322 {}
1323
1324 /* opt_pass methods: */
1325 opt_pass * clone () { return new pass_dse (m_ctxt); }
1326 virtual bool gate (function *) { return flag_tree_dse != 0; }
1327 virtual unsigned int execute (function *);
1328
1329 }; // class pass_dse
1330
1331 unsigned int
1332 pass_dse::execute (function *fun)
1333 {
1334 unsigned todo = 0;
1335 need_eh_cleanup = BITMAP_ALLOC (NULL);
1336 auto_sbitmap live_bytes (param_dse_max_object_size);
1337
1338 renumber_gimple_stmt_uids (fun);
1339
1340 calculate_dominance_info (CDI_DOMINATORS);
1341
1342 /* Dead store elimination is fundamentally a reverse program order walk. */
1343 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
1344 int n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
1345 for (int i = n; i != 0; --i)
1346 {
1347 basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
1348 gimple_stmt_iterator gsi;
1349
1350 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1351 {
1352 gimple *stmt = gsi_stmt (gsi);
1353
1354 if (gimple_vdef (stmt))
1355 dse_optimize_stmt (fun, &gsi, live_bytes);
1356 else if (def_operand_p
1357 def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
1358 {
1359 /* When we remove dead stores make sure to also delete trivially
1360 dead SSA defs. */
1361 if (has_zero_uses (DEF_FROM_PTR (def_p))
1362 && !gimple_has_side_effects (stmt)
1363 && !is_ctrl_altering_stmt (stmt)
1364 && (!stmt_could_throw_p (fun, stmt)
1365 || fun->can_delete_dead_exceptions))
1366 {
1367 if (dump_file && (dump_flags & TDF_DETAILS))
1368 {
1369 fprintf (dump_file, " Deleted trivially dead stmt: ");
1370 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1371 fprintf (dump_file, "\n");
1372 }
1373 if (gsi_remove (&gsi, true) && need_eh_cleanup)
1374 bitmap_set_bit (need_eh_cleanup, bb->index);
1375 release_defs (stmt);
1376 }
1377 }
1378 if (gsi_end_p (gsi))
1379 gsi = gsi_last_bb (bb);
1380 else
1381 gsi_prev (&gsi);
1382 }
1383 bool removed_phi = false;
1384 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
1385 {
1386 gphi *phi = si.phi ();
1387 if (has_zero_uses (gimple_phi_result (phi)))
1388 {
1389 if (dump_file && (dump_flags & TDF_DETAILS))
1390 {
1391 fprintf (dump_file, " Deleted trivially dead PHI: ");
1392 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1393 fprintf (dump_file, "\n");
1394 }
1395 remove_phi_node (&si, true);
1396 removed_phi = true;
1397 }
1398 else
1399 gsi_next (&si);
1400 }
1401 if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
1402 todo |= TODO_cleanup_cfg;
1403 }
1404 free (rpo);
1405
1406 /* Removal of stores may make some EH edges dead. Purge such edges from
1407 the CFG as needed. */
1408 if (!bitmap_empty_p (need_eh_cleanup))
1409 {
1410 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1411 todo |= TODO_cleanup_cfg;
1412 }
1413
1414 BITMAP_FREE (need_eh_cleanup);
1415
1416 return todo;
1417 }
1418
1419 } // anon namespace
1420
1421 gimple_opt_pass *
1422 make_pass_dse (gcc::context *ctxt)
1423 {
1424 return new pass_dse (ctxt);
1425 }