gcc/tree-ssa-dse.c
/* Dead store elimination
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"
#include "tree-cfgcleanup.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
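
/* A minimal illustrative sketch (the function and variable names below
   are invented for exposition, they do not appear in this file):

     void
     f (int *p)
     {
       *p = 1;   <- dead: overwritten below with no intervening load
       *p = 2;
     }

   In the virtual operand form the first store's VDEF has exactly one
   use, the second store, which post dominates it, so the first store
   can be removed.  */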


/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);


/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if such a statement was found and stored in *USE_STMT,
   otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Self-assignments are zombies.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
    {
      *use_stmt = stmt;
      return true;
    }

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to, with no intermediate statement that may use
     part of the memory stmt stores to.  That is, find a store that may
     prove stmt to be a dead store.  */
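  /* As an illustrative sketch (the GIMPLE below is invented for
     exposition, not taken from a real dump):

       # .MEM_2 = VDEF <.MEM_1>
       a.x = 1;   <- stmt, the candidate dead store
       # .MEM_3 = VDEF <.MEM_2>
       a.x = 2;   <- sole immediate use of .MEM_2, clobbers a.x

     The walk below starts at stmt, repeatedly follows the immediate
     uses of the current virtual definition and stops at the first
     statement that clobbers what stmt stores to.  */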
  temp = stmt;
  do
    {
      gimple use_stmt, defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
        return false;

      if (gimple_code (temp) == GIMPLE_PHI)
        defvar = PHI_RESULT (temp);
      else
        defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          cnt++;

          /* If we ever reach our DSE candidate stmt again, fail.  We
             cannot handle dead stores in loops.  */
          if (use_stmt == stmt)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* In simple cases we can look through PHI nodes, but we
             have to be careful with loops and with memory references
             containing operands that are also operands of PHI nodes.
             See gcc.c-torture/execute/20051110-*.c.  */
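          /* Illustrative sketch of the easy case (block numbers and the
             variable are invented for exposition):

               bb2:  a = 1;   <- the DSE candidate stmt
               bb3:  join block with a virtual PHI merging bb2's path
                     with another path
               bb4:  a = 2;   <- post-dominates bb2, found through the PHI

             Walking through the PHI in bb3 lets the store in bb4 prove
             the store in bb2 dead, provided bb4 post-dominates bb2 and
             the walk never reaches back into a loop.  */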
          else if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              if (temp
                  /* Make sure we are not in a loop latch block.  */
                  || gimple_bb (stmt) == gimple_bb (use_stmt)
                  || dominated_by_p (CDI_DOMINATORS,
                                     gimple_bb (stmt), gimple_bb (use_stmt))
                  /* We can look through PHIs to regions post-dominating
                     the DSE candidate stmt.  */
                  || !dominated_by_p (CDI_POST_DOMINATORS,
                                      gimple_bb (stmt), gimple_bb (use_stmt)))
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              /* Do not consider the PHI as a use if it dominates the
                 stmt defining the virtual operand we are processing;
                 we have already processed it in this case.  */
              if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
                  && !dominated_by_p (CDI_DOMINATORS,
                                      gimple_bb (defvar_def),
                                      gimple_bb (use_stmt)))
                temp = use_stmt;
            }
          /* If the statement is a use, the store is not dead.  */
          else if (ref_maybe_used_by_stmt_p (use_stmt,
                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* If this is a store, remember it, or bail out if we have
             multiple ones (they will be in different CFG parts then).  */
          else if (gimple_vdef (use_stmt))
            {
              if (temp)
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
        }

      if (fail)
        return false;

      /* If we didn't find any definition, this means the store is dead
         if it isn't a store to globally reachable memory.  In this case
         just pretend the stmt makes itself dead.  Otherwise fail.  */
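      /* For instance (an invented example, not from the testsuite):

           void f (void) { struct S { int x; } s; s.x = 1; }

         the store to s.x has no further virtual use at all and s does
         not escape, so the store can be treated as killing itself.  */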
      if (!temp)
        {
          if (stmt_may_clobber_global_p (stmt))
            return false;

          temp = stmt;
          break;
        }
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
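  /* E.g. (invented for illustration):

       a = b;     <- the DSE candidate stmt, stores the whole of a
       a.x = 1;   <- clobbers part of a, so the walk stops here

     The second store clobbers but does not kill the first one, so the
     caller will still refuse to delete the candidate; stopping here
     merely keeps the walk cheap.  */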
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}


/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  Bail out on calls to
     functions with a known declaration; only plain assignments are
     handled below.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
          || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;
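  /* A {CLOBBER} assignment is GIMPLE's end-of-lifetime marker, emitted
     e.g. for a C++ object once its destructor has run.  It carries
     volatile operands but is still a candidate for removal here when a
     later clobber kills it (see below).  */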

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
        return;

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
         another clobber stmt.  */
      if (gimple_clobber_p (stmt)
          && !gimple_clobber_p (use_stmt))
        return;

      /* If we have precisely one immediate use at this point, and the
         stores are to the same memory location, or there is a chain of
         virtual uses between stmt and the stmt which stores to that
         same memory location, then we may have found a redundant store.  */
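      /* Two invented shapes that pass the test below:

           a.x = 1;  a.x = 2;   <- identical LHS in both stores
           a.x = 1;  a = b;     <- the second store kills the first ref

         In either case the earlier store can go away.  */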
      if ((gimple_has_lhs (use_stmt)
           && (operand_equal_p (gimple_assign_lhs (stmt),
                                gimple_get_lhs (use_stmt), 0)))
          || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
        {
          basic_block bb;

          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
             or
             b = a; b = *p; where p might be &b,
             or
             *p = a; *p = b; where p might be &b,
             or
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
          if (stmt != use_stmt
              && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
            return;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

          /* Then we need to fix the operand of the consuming stmt.  */
          unlink_stmt_vdef (stmt);

          /* Remove the dead store.  */
          bb = gimple_bb (stmt);
          if (gsi_remove (gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
    }
}

class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction) : dom_walker (direction) {}

  virtual void before_dom_children (basic_block);
};

void
dse_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

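  /* Walk the statements backwards.  dse_optimize_stmt may remove the
     statement GSI refers to; gsi_remove then leaves GSI pointing after
     the removed statement, and if that statement was the last one in BB
     the iterator ends up at the end of the block, so restart from the
     new last statement instead of stepping backwards.  */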
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
      if (gsi_end_p (gsi))
        gsi = gsi_last_bb (bb);
      else
        gsi_prev (&gsi);
    }
}

/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (cfun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}

namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  bool gate () { return gate_dse (); }
  unsigned int execute () { return tree_ssa_dse (); }

}; // class pass_dse

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}