]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Dead code elimination pass for the GNU compiler. |
d1e082c2 | 2 | Copyright (C) 2002-2013 Free Software Foundation, Inc. |
6de9cd9a DN |
3 | Contributed by Ben Elliston <bje@redhat.com> |
4 | and Andrew MacLeod <amacleod@redhat.com> | |
5 | Adapted to use control dependence by Steven Bosscher, SUSE Labs. | |
b8698a0f | 6 | |
6de9cd9a | 7 | This file is part of GCC. |
b8698a0f | 8 | |
6de9cd9a DN |
9 | GCC is free software; you can redistribute it and/or modify it |
10 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 11 | Free Software Foundation; either version 3, or (at your option) any |
6de9cd9a | 12 | later version. |
b8698a0f | 13 | |
6de9cd9a DN |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT |
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
17 | for more details. | |
b8698a0f | 18 | |
6de9cd9a | 19 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
22 | |
23 | /* Dead code elimination. | |
24 | ||
25 | References: | |
26 | ||
27 | Building an Optimizing Compiler, | |
28 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
29 | ||
30 | Advanced Compiler Design and Implementation, | |
31 | Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10. | |
32 | ||
33 | Dead-code elimination is the removal of statements which have no | |
34 | impact on the program's output. "Dead statements" have no impact | |
35 | on the program's output, while "necessary statements" may have | |
36 | impact on the output. | |
37 | ||
38 | The algorithm consists of three phases: | |
39 | 1. Marking as necessary all statements known to be necessary, | |
40 | e.g. most function calls, writing a value to memory, etc; | |
41 | 2. Propagating necessary statements, e.g., the statements | |
42 | giving values to operands in necessary statements; and | |
43 | 3. Removing dead statements. */ | |
44 | ||
45 | #include "config.h" | |
46 | #include "system.h" | |
47 | #include "coretypes.h" | |
48 | #include "tm.h" | |
6de9cd9a DN |
49 | |
50 | #include "tree.h" | |
cf835838 | 51 | #include "gimple-pretty-print.h" |
40013784 | 52 | #include "basic-block.h" |
7a300452 | 53 | #include "tree-ssa.h" |
726a989a | 54 | #include "gimple.h" |
6de9cd9a | 55 | #include "tree-pass.h" |
6de9cd9a | 56 | #include "flags.h" |
49896738 | 57 | #include "cfgloop.h" |
a4176272 | 58 | #include "tree-scalar-evolution.h" |
726a989a | 59 | |
6de9cd9a DN |
60 | static struct stmt_stats |
61 | { | |
62 | int total; | |
63 | int total_phis; | |
64 | int removed; | |
65 | int removed_phis; | |
66 | } stats; | |
67 | ||
726a989a RB |
68 | #define STMT_NECESSARY GF_PLF_1 |
69 | ||
9771b263 | 70 | static vec<gimple> worklist; |
6de9cd9a DN |
71 | |
72 | /* Vector indicating an SSA name has already been processed and marked | |
73 | as necessary. */ | |
74 | static sbitmap processed; | |
75 | ||
ad06ee51 EB |
76 | /* Vector indicating that the last statement of a basic block has already |
77 | been marked as necessary. */ | |
6de9cd9a DN |
78 | static sbitmap last_stmt_necessary; |
79 | ||
1fc41282 JH |
80 | /* Vector indicating that BB contains statements that are live. */ |
81 | static sbitmap bb_contains_live_stmts; | |
82 | ||
6de9cd9a DN |
83 | /* Before we can determine whether a control branch is dead, we need to |
84 | compute which blocks are control dependent on which edges. | |
85 | ||
86 | We expect each block to be control dependent on very few edges so we | |
87 | use a bitmap for each block recording its edges. An array holds the | |
88 | bitmap. The Ith bit in the bitmap is set if that block is dependent | |
89 | on the Ith edge. */ | |
c8e9d8c3 | 90 | static control_dependences *cd; |
6de9cd9a | 91 | |
a28fee03 SB |
92 | /* Vector indicating that a basic block has already had all the edges |
93 | processed that it is control dependent on. */ | |
8c80c4aa | 94 | static sbitmap visited_control_parents; |
a28fee03 | 95 | |
9da4058c JL |
96 | /* TRUE if this pass alters the CFG (by removing control statements). |
97 | FALSE otherwise. | |
98 | ||
99 | If this pass alters the CFG, then it will arrange for the dominators | |
100 | to be recomputed. */ | |
101 | static bool cfg_altered; | |
102 | ||
9b3b55a1 | 103 | |
6de9cd9a DN |
104 | /* If STMT is not already marked necessary, mark it, and add it to the |
105 | worklist if ADD_TO_WORKLIST is true. */ | |
ad06ee51 | 106 | |
6de9cd9a | 107 | static inline void |
726a989a | 108 | mark_stmt_necessary (gimple stmt, bool add_to_worklist) |
6de9cd9a | 109 | { |
1e128c5f | 110 | gcc_assert (stmt); |
6de9cd9a | 111 | |
726a989a | 112 | if (gimple_plf (stmt, STMT_NECESSARY)) |
6de9cd9a DN |
113 | return; |
114 | ||
115 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
116 | { | |
117 | fprintf (dump_file, "Marking useful stmt: "); | |
726a989a | 118 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
119 | fprintf (dump_file, "\n"); |
120 | } | |
121 | ||
726a989a | 122 | gimple_set_plf (stmt, STMT_NECESSARY, true); |
6de9cd9a | 123 | if (add_to_worklist) |
9771b263 | 124 | worklist.safe_push (stmt); |
b5b8b0ac | 125 | if (bb_contains_live_stmts && !is_gimple_debug (stmt)) |
d7c028c0 | 126 | bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index); |
6de9cd9a DN |
127 | } |
128 | ||
38635499 DN |
129 | |
130 | /* Mark the statement defining operand OP as necessary. */ | |
6de9cd9a DN |
131 | |
132 | static inline void | |
38635499 | 133 | mark_operand_necessary (tree op) |
6de9cd9a | 134 | { |
726a989a | 135 | gimple stmt; |
6de9cd9a DN |
136 | int ver; |
137 | ||
1e128c5f | 138 | gcc_assert (op); |
6de9cd9a DN |
139 | |
140 | ver = SSA_NAME_VERSION (op); | |
d7c028c0 | 141 | if (bitmap_bit_p (processed, ver)) |
5006671f RG |
142 | { |
143 | stmt = SSA_NAME_DEF_STMT (op); | |
144 | gcc_assert (gimple_nop_p (stmt) | |
145 | || gimple_plf (stmt, STMT_NECESSARY)); | |
146 | return; | |
147 | } | |
d7c028c0 | 148 | bitmap_set_bit (processed, ver); |
6de9cd9a DN |
149 | |
150 | stmt = SSA_NAME_DEF_STMT (op); | |
1e128c5f | 151 | gcc_assert (stmt); |
6de9cd9a | 152 | |
726a989a | 153 | if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt)) |
6de9cd9a DN |
154 | return; |
155 | ||
5006671f RG |
156 | if (dump_file && (dump_flags & TDF_DETAILS)) |
157 | { | |
158 | fprintf (dump_file, "marking necessary through "); | |
159 | print_generic_expr (dump_file, op, 0); | |
160 | fprintf (dump_file, " stmt "); | |
161 | print_gimple_stmt (dump_file, stmt, 0, 0); | |
162 | } | |
163 | ||
726a989a | 164 | gimple_set_plf (stmt, STMT_NECESSARY, true); |
1fc41282 | 165 | if (bb_contains_live_stmts) |
d7c028c0 | 166 | bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index); |
9771b263 | 167 | worklist.safe_push (stmt); |
6de9cd9a | 168 | } |
9b3b55a1 | 169 | |
6de9cd9a | 170 | |
adb35797 | 171 | /* Mark STMT as necessary if it obviously is. Add it to the worklist if |
6de9cd9a DN |
172 | it can make other statements necessary. |
173 | ||
174 | If AGGRESSIVE is false, control statements are conservatively marked as | |
175 | necessary. */ | |
176 | ||
177 | static void | |
726a989a | 178 | mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive) |
6de9cd9a | 179 | { |
75b80166 | 180 | /* With non-call exceptions, we have to assume that all statements could |
2da02156 EB |
181 | throw. If a statement could throw, it can be deemed necessary. */ |
182 | if (cfun->can_throw_non_call_exceptions | |
183 | && !cfun->can_delete_dead_exceptions | |
184 | && stmt_could_throw_p (stmt)) | |
75b80166 SB |
185 | { |
186 | mark_stmt_necessary (stmt, true); | |
187 | return; | |
188 | } | |
189 | ||
726a989a RB |
190 | /* Statements that are implicitly live. Most function calls, asm |
191 | and return statements are required. Labels and GIMPLE_BIND nodes | |
192 | are kept because they are control flow, and we have no way of | |
193 | knowing whether they can be removed. DCE can eliminate all the | |
194 | other statements in a block, and CFG can then remove the block | |
195 | and labels. */ | |
196 | switch (gimple_code (stmt)) | |
6de9cd9a | 197 | { |
726a989a RB |
198 | case GIMPLE_PREDICT: |
199 | case GIMPLE_LABEL: | |
6de9cd9a DN |
200 | mark_stmt_necessary (stmt, false); |
201 | return; | |
202 | ||
726a989a RB |
203 | case GIMPLE_ASM: |
204 | case GIMPLE_RESX: | |
205 | case GIMPLE_RETURN: | |
6de9cd9a DN |
206 | mark_stmt_necessary (stmt, true); |
207 | return; | |
208 | ||
726a989a | 209 | case GIMPLE_CALL: |
c22c0db2 RG |
210 | { |
211 | tree callee = gimple_call_fndecl (stmt); | |
212 | if (callee != NULL_TREE | |
213 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL) | |
214 | switch (DECL_FUNCTION_CODE (callee)) | |
215 | { | |
216 | case BUILT_IN_MALLOC: | |
217 | case BUILT_IN_CALLOC: | |
218 | case BUILT_IN_ALLOCA: | |
13e49da9 | 219 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
c22c0db2 | 220 | return; |
996e1de5 RG |
221 | |
222 | default:; | |
c22c0db2 RG |
223 | } |
224 | /* Most, but not all function calls are required. Function calls that | |
225 | produce no result and have no side effects (i.e. const pure | |
226 | functions) are unnecessary. */ | |
227 | if (gimple_has_side_effects (stmt)) | |
228 | { | |
229 | mark_stmt_necessary (stmt, true); | |
230 | return; | |
231 | } | |
232 | if (!gimple_call_lhs (stmt)) | |
6de9cd9a | 233 | return; |
c22c0db2 RG |
234 | break; |
235 | } | |
6de9cd9a | 236 | |
b5b8b0ac | 237 | case GIMPLE_DEBUG: |
0ca5af51 AO |
238 | /* Debug temps without a value are not useful. ??? If we could |
239 | easily locate the debug temp bind stmt for a use thereof, | |
240 | would could refrain from marking all debug temps here, and | |
241 | mark them only if they're used. */ | |
ddb555ed JJ |
242 | if (!gimple_debug_bind_p (stmt) |
243 | || gimple_debug_bind_has_value_p (stmt) | |
0ca5af51 AO |
244 | || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL) |
245 | mark_stmt_necessary (stmt, false); | |
b5b8b0ac AO |
246 | return; |
247 | ||
726a989a | 248 | case GIMPLE_GOTO: |
7f604986 KH |
249 | gcc_assert (!simple_goto_p (stmt)); |
250 | mark_stmt_necessary (stmt, true); | |
6de9cd9a DN |
251 | return; |
252 | ||
726a989a RB |
253 | case GIMPLE_COND: |
254 | gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2); | |
6de9cd9a DN |
255 | /* Fall through. */ |
256 | ||
726a989a | 257 | case GIMPLE_SWITCH: |
6de9cd9a DN |
258 | if (! aggressive) |
259 | mark_stmt_necessary (stmt, true); | |
260 | break; | |
261 | ||
47598145 MM |
262 | case GIMPLE_ASSIGN: |
263 | if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME | |
264 | && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt))) | |
265 | return; | |
266 | break; | |
267 | ||
6de9cd9a DN |
268 | default: |
269 | break; | |
270 | } | |
271 | ||
c597ef4e DN |
272 | /* If the statement has volatile operands, it needs to be preserved. |
273 | Same for statements that can alter control flow in unpredictable | |
274 | ways. */ | |
726a989a | 275 | if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt)) |
6de9cd9a DN |
276 | { |
277 | mark_stmt_necessary (stmt, true); | |
278 | return; | |
279 | } | |
280 | ||
209be553 | 281 | if (stmt_may_clobber_global_p (stmt)) |
a32b97a2 | 282 | { |
fa555252 DB |
283 | mark_stmt_necessary (stmt, true); |
284 | return; | |
6de9cd9a DN |
285 | } |
286 | ||
287 | return; | |
288 | } | |
9b3b55a1 DN |
289 | |
290 | ||
ad06ee51 EB |
291 | /* Mark the last statement of BB as necessary. */ |
292 | ||
293 | static void | |
294 | mark_last_stmt_necessary (basic_block bb) | |
295 | { | |
296 | gimple stmt = last_stmt (bb); | |
297 | ||
d7c028c0 LC |
298 | bitmap_set_bit (last_stmt_necessary, bb->index); |
299 | bitmap_set_bit (bb_contains_live_stmts, bb->index); | |
ad06ee51 EB |
300 | |
301 | /* We actually mark the statement only if it is a control statement. */ | |
302 | if (stmt && is_ctrl_stmt (stmt)) | |
303 | mark_stmt_necessary (stmt, true); | |
304 | } | |
305 | ||
306 | ||
307 | /* Mark control dependent edges of BB as necessary. We have to do this only | |
308 | once for each basic block so we set the appropriate bit after we're done. | |
309 | ||
310 | When IGNORE_SELF is true, ignore BB in the list of control dependences. */ | |
b1a0b3b4 | 311 | |
9b3b55a1 | 312 | static void |
c8e9d8c3 | 313 | mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self) |
9b3b55a1 DN |
314 | { |
315 | bitmap_iterator bi; | |
316 | unsigned edge_number; | |
b1a0b3b4 | 317 | bool skipped = false; |
9b3b55a1 DN |
318 | |
319 | gcc_assert (bb != EXIT_BLOCK_PTR); | |
320 | ||
321 | if (bb == ENTRY_BLOCK_PTR) | |
322 | return; | |
323 | ||
c8e9d8c3 RB |
324 | EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index), |
325 | 0, edge_number, bi) | |
9b3b55a1 | 326 | { |
c8e9d8c3 | 327 | basic_block cd_bb = cd->get_edge (edge_number)->src; |
9b3b55a1 | 328 | |
b1a0b3b4 JH |
329 | if (ignore_self && cd_bb == bb) |
330 | { | |
331 | skipped = true; | |
332 | continue; | |
333 | } | |
334 | ||
d7c028c0 | 335 | if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index)) |
ad06ee51 | 336 | mark_last_stmt_necessary (cd_bb); |
9b3b55a1 | 337 | } |
ad06ee51 | 338 | |
b1a0b3b4 | 339 | if (!skipped) |
d7c028c0 | 340 | bitmap_set_bit (visited_control_parents, bb->index); |
9b3b55a1 DN |
341 | } |
342 | ||
343 | ||
6de9cd9a DN |
344 | /* Find obviously necessary statements. These are things like most function |
345 | calls, and stores to file level variables. | |
346 | ||
347 | If EL is NULL, control statements are conservatively marked as | |
348 | necessary. Otherwise it contains the list of edges used by control | |
349 | dependence analysis. */ | |
350 | ||
351 | static void | |
c8e9d8c3 | 352 | find_obviously_necessary_stmts (bool aggressive) |
6de9cd9a DN |
353 | { |
354 | basic_block bb; | |
726a989a | 355 | gimple_stmt_iterator gsi; |
6de9cd9a | 356 | edge e; |
726a989a | 357 | gimple phi, stmt; |
9e3920e9 | 358 | int flags; |
6de9cd9a DN |
359 | |
360 | FOR_EACH_BB (bb) | |
361 | { | |
9b3b55a1 | 362 | /* PHI nodes are never inherently necessary. */ |
726a989a RB |
363 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
364 | { | |
365 | phi = gsi_stmt (gsi); | |
366 | gimple_set_plf (phi, STMT_NECESSARY, false); | |
367 | } | |
6de9cd9a DN |
368 | |
369 | /* Check all statements in the block. */ | |
726a989a | 370 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
6de9cd9a | 371 | { |
726a989a RB |
372 | stmt = gsi_stmt (gsi); |
373 | gimple_set_plf (stmt, STMT_NECESSARY, false); | |
c8e9d8c3 | 374 | mark_stmt_if_obviously_necessary (stmt, aggressive); |
6de9cd9a | 375 | } |
6de9cd9a DN |
376 | } |
377 | ||
7351bcaa JH |
378 | /* Pure and const functions are finite and thus have no infinite loops in |
379 | them. */ | |
9e3920e9 JJ |
380 | flags = flags_from_decl_or_type (current_function_decl); |
381 | if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE)) | |
7351bcaa JH |
382 | return; |
383 | ||
384 | /* Prevent the empty possibly infinite loops from being removed. */ | |
c8e9d8c3 | 385 | if (aggressive) |
6de9cd9a | 386 | { |
7351bcaa JH |
387 | loop_iterator li; |
388 | struct loop *loop; | |
389 | scev_initialize (); | |
390 | if (mark_irreducible_loops ()) | |
391 | FOR_EACH_BB (bb) | |
392 | { | |
393 | edge_iterator ei; | |
394 | FOR_EACH_EDGE (e, ei, bb->succs) | |
395 | if ((e->flags & EDGE_DFS_BACK) | |
396 | && (e->flags & EDGE_IRREDUCIBLE_LOOP)) | |
397 | { | |
398 | if (dump_file) | |
399 | fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n", | |
400 | e->src->index, e->dest->index); | |
c8e9d8c3 | 401 | mark_control_dependent_edges_necessary (e->dest, false); |
7351bcaa JH |
402 | } |
403 | } | |
404 | ||
405 | FOR_EACH_LOOP (li, loop, 0) | |
406 | if (!finite_loop_p (loop)) | |
407 | { | |
408 | if (dump_file) | |
409 | fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num); | |
c8e9d8c3 | 410 | mark_control_dependent_edges_necessary (loop->latch, false); |
7351bcaa JH |
411 | } |
412 | scev_finalize (); | |
6de9cd9a DN |
413 | } |
414 | } | |
6de9cd9a | 415 | |
6de9cd9a | 416 | |
5006671f RG |
417 | /* Return true if REF is based on an aliased base, otherwise false. */ |
418 | ||
419 | static bool | |
420 | ref_may_be_aliased (tree ref) | |
421 | { | |
b3421a06 | 422 | gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR); |
5006671f RG |
423 | while (handled_component_p (ref)) |
424 | ref = TREE_OPERAND (ref, 0); | |
70f34814 RG |
425 | if (TREE_CODE (ref) == MEM_REF |
426 | && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR) | |
427 | ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0); | |
5006671f RG |
428 | return !(DECL_P (ref) |
429 | && !may_be_aliased (ref)); | |
430 | } | |
431 | ||
5006671f RG |
432 | static bitmap visited = NULL; |
433 | static unsigned int longest_chain = 0; | |
434 | static unsigned int total_chain = 0; | |
a61f9cc0 | 435 | static unsigned int nr_walks = 0; |
5006671f RG |
436 | static bool chain_ovfl = false; |
437 | ||
438 | /* Worker for the walker that marks reaching definitions of REF, | |
439 | which is based on a non-aliased decl, necessary. It returns | |
440 | true whenever the defining statement of the current VDEF is | |
441 | a kill for REF, as no dominating may-defs are necessary for REF | |
1415abc0 RG |
442 | anymore. DATA points to the basic-block that contains the |
443 | stmt that refers to REF. */ | |
5006671f RG |
444 | |
445 | static bool | |
1415abc0 | 446 | mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data) |
5006671f RG |
447 | { |
448 | gimple def_stmt = SSA_NAME_DEF_STMT (vdef); | |
5006671f RG |
449 | |
450 | /* All stmts we visit are necessary. */ | |
451 | mark_operand_necessary (vdef); | |
452 | ||
453 | /* If the stmt lhs kills ref, then we can stop walking. */ | |
454 | if (gimple_has_lhs (def_stmt) | |
094f6ab3 RG |
455 | && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME |
456 | /* The assignment is not necessarily carried out if it can throw | |
457 | and we can catch it in the current function where we could inspect | |
458 | the previous value. | |
459 | ??? We only need to care about the RHS throwing. For aggregate | |
460 | assignments or similar calls and non-call exceptions the LHS | |
461 | might throw as well. */ | |
462 | && !stmt_can_throw_internal (def_stmt)) | |
5006671f RG |
463 | { |
464 | tree base, lhs = gimple_get_lhs (def_stmt); | |
465 | HOST_WIDE_INT size, offset, max_size; | |
42bc61e0 | 466 | ao_ref_base (ref); |
5006671f RG |
467 | base = get_ref_base_and_extent (lhs, &offset, &size, &max_size); |
468 | /* We can get MEM[symbol: sZ, index: D.8862_1] here, | |
469 | so base == refd->base does not always hold. */ | |
42bc61e0 | 470 | if (base == ref->base) |
5006671f RG |
471 | { |
472 | /* For a must-alias check we need to be able to constrain | |
473 | the accesses properly. */ | |
474 | if (size != -1 && size == max_size | |
42bc61e0 | 475 | && ref->max_size != -1) |
5006671f | 476 | { |
42bc61e0 RG |
477 | if (offset <= ref->offset |
478 | && offset + size >= ref->offset + ref->max_size) | |
5006671f RG |
479 | return true; |
480 | } | |
481 | /* Or they need to be exactly the same. */ | |
42bc61e0 | 482 | else if (ref->ref |
1415abc0 RG |
483 | /* Make sure there is no induction variable involved |
484 | in the references (gcc.c-torture/execute/pr42142.c). | |
485 | The simplest way is to check if the kill dominates | |
486 | the use. */ | |
301bbc16 RB |
487 | /* But when both are in the same block we cannot |
488 | easily tell whether we came from a backedge | |
489 | unless we decide to compute stmt UIDs | |
490 | (see PR58246). */ | |
491 | && (basic_block) data != gimple_bb (def_stmt) | |
1415abc0 RG |
492 | && dominated_by_p (CDI_DOMINATORS, (basic_block) data, |
493 | gimple_bb (def_stmt)) | |
42bc61e0 | 494 | && operand_equal_p (ref->ref, lhs, 0)) |
5006671f RG |
495 | return true; |
496 | } | |
497 | } | |
498 | ||
499 | /* Otherwise keep walking. */ | |
500 | return false; | |
501 | } | |
502 | ||
503 | static void | |
504 | mark_aliased_reaching_defs_necessary (gimple stmt, tree ref) | |
505 | { | |
5006671f | 506 | unsigned int chain; |
42bc61e0 | 507 | ao_ref refd; |
5006671f | 508 | gcc_assert (!chain_ovfl); |
42bc61e0 RG |
509 | ao_ref_init (&refd, ref); |
510 | chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt), | |
5006671f | 511 | mark_aliased_reaching_defs_necessary_1, |
1415abc0 | 512 | gimple_bb (stmt), NULL); |
5006671f RG |
513 | if (chain > longest_chain) |
514 | longest_chain = chain; | |
515 | total_chain += chain; | |
a61f9cc0 | 516 | nr_walks++; |
5006671f RG |
517 | } |
518 | ||
519 | /* Worker for the walker that marks reaching definitions of REF, which | |
520 | is not based on a non-aliased decl. For simplicity we need to end | |
521 | up marking all may-defs necessary that are not based on a non-aliased | |
522 | decl. The only job of this walker is to skip may-defs based on | |
523 | a non-aliased decl. */ | |
524 | ||
525 | static bool | |
42bc61e0 RG |
526 | mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED, |
527 | tree vdef, void *data ATTRIBUTE_UNUSED) | |
5006671f RG |
528 | { |
529 | gimple def_stmt = SSA_NAME_DEF_STMT (vdef); | |
530 | ||
531 | /* We have to skip already visited (and thus necessary) statements | |
532 | to make the chaining work after we dropped back to simple mode. */ | |
533 | if (chain_ovfl | |
d7c028c0 | 534 | && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef))) |
5006671f RG |
535 | { |
536 | gcc_assert (gimple_nop_p (def_stmt) | |
537 | || gimple_plf (def_stmt, STMT_NECESSARY)); | |
538 | return false; | |
539 | } | |
540 | ||
541 | /* We want to skip stores to non-aliased variables. */ | |
542 | if (!chain_ovfl | |
543 | && gimple_assign_single_p (def_stmt)) | |
544 | { | |
545 | tree lhs = gimple_assign_lhs (def_stmt); | |
546 | if (!ref_may_be_aliased (lhs)) | |
547 | return false; | |
548 | } | |
549 | ||
996e1de5 RG |
550 | /* We want to skip statments that do not constitute stores but have |
551 | a virtual definition. */ | |
552 | if (is_gimple_call (def_stmt)) | |
553 | { | |
554 | tree callee = gimple_call_fndecl (def_stmt); | |
555 | if (callee != NULL_TREE | |
556 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL) | |
557 | switch (DECL_FUNCTION_CODE (callee)) | |
558 | { | |
559 | case BUILT_IN_MALLOC: | |
560 | case BUILT_IN_CALLOC: | |
561 | case BUILT_IN_ALLOCA: | |
13e49da9 | 562 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
996e1de5 RG |
563 | case BUILT_IN_FREE: |
564 | return false; | |
565 | ||
566 | default:; | |
567 | } | |
568 | } | |
569 | ||
5006671f | 570 | mark_operand_necessary (vdef); |
67635176 RG |
571 | |
572 | return false; | |
5006671f RG |
573 | } |
574 | ||
575 | static void | |
576 | mark_all_reaching_defs_necessary (gimple stmt) | |
577 | { | |
578 | walk_aliased_vdefs (NULL, gimple_vuse (stmt), | |
579 | mark_all_reaching_defs_necessary_1, NULL, &visited); | |
580 | } | |
581 | ||
7351bcaa JH |
582 | /* Return true for PHI nodes with one or identical arguments |
583 | can be removed. */ | |
584 | static bool | |
585 | degenerate_phi_p (gimple phi) | |
586 | { | |
587 | unsigned int i; | |
588 | tree op = gimple_phi_arg_def (phi, 0); | |
589 | for (i = 1; i < gimple_phi_num_args (phi); i++) | |
590 | if (gimple_phi_arg_def (phi, i) != op) | |
591 | return false; | |
592 | return true; | |
593 | } | |
594 | ||
9b3b55a1 DN |
595 | /* Propagate necessity using the operands of necessary statements. |
596 | Process the uses on each statement in the worklist, and add all | |
597 | feeding statements which contribute to the calculation of this | |
b8698a0f | 598 | value to the worklist. |
6de9cd9a DN |
599 | |
600 | In conservative mode, EL is NULL. */ | |
601 | ||
602 | static void | |
c8e9d8c3 | 603 | propagate_necessity (bool aggressive) |
6de9cd9a | 604 | { |
726a989a | 605 | gimple stmt; |
6de9cd9a DN |
606 | |
607 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
608 | fprintf (dump_file, "\nProcessing worklist:\n"); | |
609 | ||
9771b263 | 610 | while (worklist.length () > 0) |
6de9cd9a | 611 | { |
9b3b55a1 | 612 | /* Take STMT from worklist. */ |
9771b263 | 613 | stmt = worklist.pop (); |
6de9cd9a DN |
614 | |
615 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
616 | { | |
617 | fprintf (dump_file, "processing: "); | |
726a989a | 618 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
619 | fprintf (dump_file, "\n"); |
620 | } | |
621 | ||
622 | if (aggressive) | |
623 | { | |
ad06ee51 EB |
624 | /* Mark the last statement of the basic blocks on which the block |
625 | containing STMT is control dependent, but only if we haven't | |
6de9cd9a | 626 | already done so. */ |
726a989a | 627 | basic_block bb = gimple_bb (stmt); |
a28fee03 | 628 | if (bb != ENTRY_BLOCK_PTR |
d7c028c0 | 629 | && !bitmap_bit_p (visited_control_parents, bb->index)) |
c8e9d8c3 | 630 | mark_control_dependent_edges_necessary (bb, false); |
6de9cd9a DN |
631 | } |
632 | ||
5006671f RG |
633 | if (gimple_code (stmt) == GIMPLE_PHI |
634 | /* We do not process virtual PHI nodes nor do we track their | |
635 | necessity. */ | |
ea057359 | 636 | && !virtual_operand_p (gimple_phi_result (stmt))) |
6de9cd9a DN |
637 | { |
638 | /* PHI nodes are somewhat special in that each PHI alternative has | |
639 | data and control dependencies. All the statements feeding the | |
640 | PHI node's arguments are always necessary. In aggressive mode, | |
641 | we also consider the control dependent edges leading to the | |
642 | predecessor block associated with each PHI alternative as | |
643 | necessary. */ | |
726a989a | 644 | size_t k; |
9b3b55a1 | 645 | |
726a989a | 646 | for (k = 0; k < gimple_phi_num_args (stmt); k++) |
6de9cd9a | 647 | { |
9b3b55a1 | 648 | tree arg = PHI_ARG_DEF (stmt, k); |
6de9cd9a | 649 | if (TREE_CODE (arg) == SSA_NAME) |
38635499 | 650 | mark_operand_necessary (arg); |
6de9cd9a DN |
651 | } |
652 | ||
b1a0b3b4 JH |
653 | /* For PHI operands it matters from where the control flow arrives |
654 | to the BB. Consider the following example: | |
655 | ||
656 | a=exp1; | |
657 | b=exp2; | |
658 | if (test) | |
659 | ; | |
660 | else | |
661 | ; | |
662 | c=PHI(a,b) | |
663 | ||
664 | We need to mark control dependence of the empty basic blocks, since they | |
665 | contains computation of PHI operands. | |
666 | ||
667 | Doing so is too restrictive in the case the predecestor block is in | |
668 | the loop. Consider: | |
669 | ||
670 | if (b) | |
671 | { | |
672 | int i; | |
673 | for (i = 0; i<1000; ++i) | |
674 | ; | |
675 | j = 0; | |
676 | } | |
677 | return j; | |
678 | ||
679 | There is PHI for J in the BB containing return statement. | |
680 | In this case the control dependence of predecestor block (that is | |
681 | within the empty loop) also contains the block determining number | |
682 | of iterations of the block that would prevent removing of empty | |
683 | loop in this case. | |
684 | ||
685 | This scenario can be avoided by splitting critical edges. | |
686 | To save the critical edge splitting pass we identify how the control | |
687 | dependence would look like if the edge was split. | |
688 | ||
689 | Consider the modified CFG created from current CFG by splitting | |
690 | edge B->C. In the postdominance tree of modified CFG, C' is | |
691 | always child of C. There are two cases how chlids of C' can look | |
692 | like: | |
693 | ||
694 | 1) C' is leaf | |
695 | ||
696 | In this case the only basic block C' is control dependent on is B. | |
697 | ||
698 | 2) C' has single child that is B | |
699 | ||
700 | In this case control dependence of C' is same as control | |
701 | dependence of B in original CFG except for block B itself. | |
702 | (since C' postdominate B in modified CFG) | |
703 | ||
704 | Now how to decide what case happens? There are two basic options: | |
705 | ||
706 | a) C postdominate B. Then C immediately postdominate B and | |
707 | case 2 happens iff there is no other way from B to C except | |
708 | the edge B->C. | |
709 | ||
710 | There is other way from B to C iff there is succesor of B that | |
711 | is not postdominated by B. Testing this condition is somewhat | |
712 | expensive, because we need to iterate all succesors of B. | |
713 | We are safe to assume that this does not happen: we will mark B | |
714 | as needed when processing the other path from B to C that is | |
715 | conrol dependent on B and marking control dependencies of B | |
716 | itself is harmless because they will be processed anyway after | |
717 | processing control statement in B. | |
718 | ||
719 | b) C does not postdominate B. Always case 1 happens since there is | |
720 | path from C to exit that does not go through B and thus also C'. */ | |
721 | ||
7351bcaa | 722 | if (aggressive && !degenerate_phi_p (stmt)) |
6de9cd9a | 723 | { |
726a989a | 724 | for (k = 0; k < gimple_phi_num_args (stmt); k++) |
6de9cd9a | 725 | { |
726a989a | 726 | basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src; |
b1a0b3b4 JH |
727 | |
728 | if (gimple_bb (stmt) | |
729 | != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb)) | |
6de9cd9a | 730 | { |
d7c028c0 | 731 | if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index)) |
ad06ee51 | 732 | mark_last_stmt_necessary (arg_bb); |
6de9cd9a | 733 | } |
b1a0b3b4 | 734 | else if (arg_bb != ENTRY_BLOCK_PTR |
d7c028c0 | 735 | && !bitmap_bit_p (visited_control_parents, |
ad06ee51 | 736 | arg_bb->index)) |
c8e9d8c3 | 737 | mark_control_dependent_edges_necessary (arg_bb, true); |
6de9cd9a DN |
738 | } |
739 | } | |
740 | } | |
741 | else | |
742 | { | |
743 | /* Propagate through the operands. Examine all the USE, VUSE and | |
b8698a0f | 744 | VDEF operands in this statement. Mark all the statements |
5006671f | 745 | which feed this statement's uses as necessary. */ |
38635499 DN |
746 | ssa_op_iter iter; |
747 | tree use; | |
4c124b4c | 748 | |
996e1de5 RG |
749 | /* If this is a call to free which is directly fed by an |
750 | allocation function do not mark that necessary through | |
751 | processing the argument. */ | |
752 | if (gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
753 | { | |
754 | tree ptr = gimple_call_arg (stmt, 0); | |
755 | gimple def_stmt; | |
756 | tree def_callee; | |
757 | /* If the pointer we free is defined by an allocation | |
758 | function do not add the call to the worklist. */ | |
759 | if (TREE_CODE (ptr) == SSA_NAME | |
760 | && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr)) | |
761 | && (def_callee = gimple_call_fndecl (def_stmt)) | |
762 | && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL | |
763 | && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC | |
764 | || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC)) | |
765 | continue; | |
766 | } | |
767 | ||
5006671f | 768 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE) |
38635499 | 769 | mark_operand_necessary (use); |
5006671f RG |
770 | |
771 | use = gimple_vuse (stmt); | |
772 | if (!use) | |
773 | continue; | |
774 | ||
775 | /* If we dropped to simple mode make all immediately | |
776 | reachable definitions necessary. */ | |
777 | if (chain_ovfl) | |
778 | { | |
779 | mark_all_reaching_defs_necessary (stmt); | |
780 | continue; | |
781 | } | |
782 | ||
783 | /* For statements that may load from memory (have a VUSE) we | |
784 | have to mark all reaching (may-)definitions as necessary. | |
785 | We partition this task into two cases: | |
786 | 1) explicit loads based on decls that are not aliased | |
787 | 2) implicit loads (like calls) and explicit loads not | |
788 | based on decls that are not aliased (like indirect | |
789 | references or loads from globals) | |
790 | For 1) we mark all reaching may-defs as necessary, stopping | |
791 | at dominating kills. For 2) we want to mark all dominating | |
792 | references necessary, but non-aliased ones which we handle | |
67635176 RG |
793 | in 1). By keeping a global visited bitmap for references |
794 | we walk for 2) we avoid quadratic behavior for those. */ | |
5006671f RG |
795 | |
796 | if (is_gimple_call (stmt)) | |
797 | { | |
14c41b9b | 798 | tree callee = gimple_call_fndecl (stmt); |
5006671f RG |
799 | unsigned i; |
800 | ||
14c41b9b RG |
801 | /* Calls to functions that are merely acting as barriers |
802 | or that only store to memory do not make any previous | |
803 | stores necessary. */ | |
804 | if (callee != NULL_TREE | |
805 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
806 | && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET | |
36dc1a88 | 807 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK |
14c41b9b | 808 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC |
36dc1a88 | 809 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC |
ab4472fa | 810 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE |
df2f6100 | 811 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END |
ab4472fa | 812 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA |
13e49da9 TV |
813 | || (DECL_FUNCTION_CODE (callee) |
814 | == BUILT_IN_ALLOCA_WITH_ALIGN) | |
ab4472fa | 815 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE |
45d439ac JJ |
816 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE |
817 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED)) | |
14c41b9b RG |
818 | continue; |
819 | ||
5006671f | 820 | /* Calls implicitly load from memory, their arguments |
67635176 | 821 | in addition may explicitly perform memory loads. */ |
5006671f RG |
822 | mark_all_reaching_defs_necessary (stmt); |
823 | for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
824 | { | |
825 | tree arg = gimple_call_arg (stmt, i); | |
826 | if (TREE_CODE (arg) == SSA_NAME | |
827 | || is_gimple_min_invariant (arg)) | |
828 | continue; | |
b3421a06 TV |
829 | if (TREE_CODE (arg) == WITH_SIZE_EXPR) |
830 | arg = TREE_OPERAND (arg, 0); | |
5006671f RG |
831 | if (!ref_may_be_aliased (arg)) |
832 | mark_aliased_reaching_defs_necessary (stmt, arg); | |
833 | } | |
834 | } | |
835 | else if (gimple_assign_single_p (stmt)) | |
836 | { | |
67635176 | 837 | tree rhs; |
5006671f RG |
838 | /* If this is a load mark things necessary. */ |
839 | rhs = gimple_assign_rhs1 (stmt); | |
840 | if (TREE_CODE (rhs) != SSA_NAME | |
47598145 MM |
841 | && !is_gimple_min_invariant (rhs) |
842 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
5006671f RG |
843 | { |
844 | if (!ref_may_be_aliased (rhs)) | |
845 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
846 | else | |
47598145 | 847 | mark_all_reaching_defs_necessary (stmt); |
5006671f | 848 | } |
5006671f RG |
849 | } |
850 | else if (gimple_code (stmt) == GIMPLE_RETURN) | |
851 | { | |
852 | tree rhs = gimple_return_retval (stmt); | |
853 | /* A return statement may perform a load. */ | |
e106efc7 RG |
854 | if (rhs |
855 | && TREE_CODE (rhs) != SSA_NAME | |
47598145 MM |
856 | && !is_gimple_min_invariant (rhs) |
857 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
5006671f RG |
858 | { |
859 | if (!ref_may_be_aliased (rhs)) | |
860 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
861 | else | |
862 | mark_all_reaching_defs_necessary (stmt); | |
863 | } | |
864 | } | |
865 | else if (gimple_code (stmt) == GIMPLE_ASM) | |
866 | { | |
867 | unsigned i; | |
868 | mark_all_reaching_defs_necessary (stmt); | |
869 | /* Inputs may perform loads. */ | |
870 | for (i = 0; i < gimple_asm_ninputs (stmt); ++i) | |
871 | { | |
872 | tree op = TREE_VALUE (gimple_asm_input_op (stmt, i)); | |
873 | if (TREE_CODE (op) != SSA_NAME | |
874 | && !is_gimple_min_invariant (op) | |
47598145 | 875 | && TREE_CODE (op) != CONSTRUCTOR |
5006671f RG |
876 | && !ref_may_be_aliased (op)) |
877 | mark_aliased_reaching_defs_necessary (stmt, op); | |
878 | } | |
879 | } | |
1d4fb493 RH |
880 | else if (gimple_code (stmt) == GIMPLE_TRANSACTION) |
881 | { | |
882 | /* The beginning of a transaction is a memory barrier. */ | |
883 | /* ??? If we were really cool, we'd only be a barrier | |
884 | for the memories touched within the transaction. */ | |
885 | mark_all_reaching_defs_necessary (stmt); | |
886 | } | |
5006671f RG |
887 | else |
888 | gcc_unreachable (); | |
889 | ||
890 | /* If we over-used our alias oracle budget drop to simple | |
a61f9cc0 RG |
891 | mode. The cost metric allows quadratic behavior |
892 | (number of uses times number of may-defs queries) up to | |
893 | a constant maximal number of queries and after that falls back to | |
5006671f | 894 | super-linear complexity. */ |
a61f9cc0 RG |
895 | if (/* Constant but quadratic for small functions. */ |
896 | total_chain > 128 * 128 | |
897 | /* Linear in the number of may-defs. */ | |
898 | && total_chain > 32 * longest_chain | |
899 | /* Linear in the number of uses. */ | |
900 | && total_chain > nr_walks * 32) | |
5006671f RG |
901 | { |
902 | chain_ovfl = true; | |
903 | if (visited) | |
904 | bitmap_clear (visited); | |
905 | } | |
6de9cd9a DN |
906 | } |
907 | } | |
908 | } | |
52328bf6 | 909 | |
/* Replace all uses of NAME by underlying variable and mark it
   for renaming.  This assumes the defining statement of NAME is
   going to be removed.  */

void
mark_virtual_operand_for_renaming (tree name)
{
  tree name_var = SSA_NAME_VAR (name);
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;

  /* This shortcut is only valid for virtual operands; a real SSA name
     would need an actual replacement value.  */
  gcc_assert (VAR_DECL_IS_VIRTUAL_OPERAND (name_var));

  /* Rewrite every use of NAME back to the bare symbol...  */
  FOR_EACH_IMM_USE_STMT (stmt, iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, name_var);
      used = true;
    }

  /* ...and, if there was at least one use, request an update of the
     virtual operand web for the whole function.  */
  if (used)
    mark_virtual_operands_for_renaming (cfun);
}
1d65f45c | 933 | |
525174a2 RG |
934 | /* Replace all uses of the virtual PHI result by its underlying variable |
935 | and mark it for renaming. This assumes the PHI node is going to be | |
936 | removed. */ | |
266fbb79 TV |
937 | |
938 | void | |
939 | mark_virtual_phi_result_for_renaming (gimple phi) | |
940 | { | |
1fc41282 JH |
941 | if (dump_file && (dump_flags & TDF_DETAILS)) |
942 | { | |
943 | fprintf (dump_file, "Marking result for renaming : "); | |
944 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); | |
945 | fprintf (dump_file, "\n"); | |
946 | } | |
1d65f45c | 947 | |
266fbb79 | 948 | mark_virtual_operand_for_renaming (gimple_phi_result (phi)); |
1fc41282 | 949 | } |
52328bf6 | 950 | |
266fbb79 | 951 | |
6de9cd9a DN |
952 | /* Remove dead PHI nodes from block BB. */ |
953 | ||
7665f023 | 954 | static bool |
6de9cd9a DN |
955 | remove_dead_phis (basic_block bb) |
956 | { | |
7665f023 | 957 | bool something_changed = false; |
726a989a RB |
958 | gimple phi; |
959 | gimple_stmt_iterator gsi; | |
6de9cd9a | 960 | |
355a7673 | 961 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);) |
6de9cd9a DN |
962 | { |
963 | stats.total_phis++; | |
726a989a | 964 | phi = gsi_stmt (gsi); |
6de9cd9a | 965 | |
5006671f RG |
966 | /* We do not track necessity of virtual PHI nodes. Instead do |
967 | very simple dead PHI removal here. */ | |
ea057359 | 968 | if (virtual_operand_p (gimple_phi_result (phi))) |
5006671f | 969 | { |
5006671f RG |
970 | /* Virtual PHI nodes with one or identical arguments |
971 | can be removed. */ | |
7351bcaa | 972 | if (degenerate_phi_p (phi)) |
5006671f RG |
973 | { |
974 | tree vdef = gimple_phi_result (phi); | |
7351bcaa JH |
975 | tree vuse = gimple_phi_arg_def (phi, 0); |
976 | ||
5006671f RG |
977 | use_operand_p use_p; |
978 | imm_use_iterator iter; | |
979 | gimple use_stmt; | |
980 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef) | |
981 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
982 | SET_USE (use_p, vuse); | |
eab09a51 JH |
983 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef) |
984 | && TREE_CODE (vuse) == SSA_NAME) | |
5006671f RG |
985 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1; |
986 | } | |
987 | else | |
988 | gimple_set_plf (phi, STMT_NECESSARY, true); | |
989 | } | |
990 | ||
726a989a | 991 | if (!gimple_plf (phi, STMT_NECESSARY)) |
6de9cd9a | 992 | { |
7665f023 | 993 | something_changed = true; |
6de9cd9a DN |
994 | if (dump_file && (dump_flags & TDF_DETAILS)) |
995 | { | |
996 | fprintf (dump_file, "Deleting : "); | |
726a989a | 997 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); |
6de9cd9a DN |
998 | fprintf (dump_file, "\n"); |
999 | } | |
1000 | ||
726a989a | 1001 | remove_phi_node (&gsi, true); |
6de9cd9a | 1002 | stats.removed_phis++; |
5006671f | 1003 | continue; |
6de9cd9a | 1004 | } |
5006671f RG |
1005 | |
1006 | gsi_next (&gsi); | |
6de9cd9a | 1007 | } |
7665f023 | 1008 | return something_changed; |
6de9cd9a | 1009 | } |
9b3b55a1 | 1010 | |
/* Forward edge E to respective POST_DOM_BB and update PHIs.  Returns
   the edge that now reaches POST_DOM_BB.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing control dependent BB.
         This means that we can pick any edge to duplicate PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      /* No gsi_next in the loop header: PHI removal advances GSI.  */
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHI do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* Copy the argument from the chosen existing predecessor edge
	     for the newly redirected edge E.  */
	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI if not dead can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}
9b3b55a1 | 1070 | |
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If edge is already there, try to use it.  This avoids need to update
	 PHI nodes.  Also watch for cases where post dominator does not exist
	 or is exit block.  These can happen for infinite loops as we create
	 fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      /* All flow now goes through the surviving edge.  */
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gimple note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  /* Detach the statement from the virtual operand chain, then remove
     it and release the SSA names it defined.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
9b3b55a1 | 1160 | |
9b3b55a1 DN |
1161 | /* Eliminate unnecessary statements. Any instruction not marked as necessary |
1162 | contributes nothing to the program, and can be deleted. */ | |
1163 | ||
7665f023 | 1164 | static bool |
9b3b55a1 DN |
1165 | eliminate_unnecessary_stmts (void) |
1166 | { | |
7665f023 | 1167 | bool something_changed = false; |
9b3b55a1 | 1168 | basic_block bb; |
0ca5af51 | 1169 | gimple_stmt_iterator gsi, psi; |
726a989a RB |
1170 | gimple stmt; |
1171 | tree call; | |
9771b263 | 1172 | vec<basic_block> h; |
9b3b55a1 DN |
1173 | |
1174 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1175 | fprintf (dump_file, "\nEliminating unnecessary statements:\n"); | |
1176 | ||
1177 | clear_special_calls (); | |
9b3b55a1 | 1178 | |
b5b8b0ac AO |
1179 | /* Walking basic blocks and statements in reverse order avoids |
1180 | releasing SSA names before any other DEFs that refer to them are | |
1181 | released. This helps avoid loss of debug information, as we get | |
1182 | a chance to propagate all RHSs of removed SSAs into debug uses, | |
1183 | rather than only the latest ones. E.g., consider: | |
1184 | ||
1185 | x_3 = y_1 + z_2; | |
1186 | a_5 = x_3 - b_4; | |
1187 | # DEBUG a => a_5 | |
1188 | ||
1189 | If we were to release x_3 before a_5, when we reached a_5 and | |
1190 | tried to substitute it into the debug stmt, we'd see x_3 there, | |
1191 | but x_3's DEF, type, etc would have already been disconnected. | |
1192 | By going backwards, the debug stmt first changes to: | |
1193 | ||
1194 | # DEBUG a => x_3 - b_4 | |
1195 | ||
1196 | and then to: | |
1197 | ||
1198 | # DEBUG a => y_1 + z_2 - b_4 | |
1199 | ||
1200 | as desired. */ | |
1201 | gcc_assert (dom_info_available_p (CDI_DOMINATORS)); | |
1202 | h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR)); | |
1203 | ||
9771b263 | 1204 | while (h.length ()) |
9b3b55a1 | 1205 | { |
9771b263 | 1206 | bb = h.pop (); |
b5b8b0ac | 1207 | |
9b3b55a1 | 1208 | /* Remove dead statements. */ |
0ca5af51 | 1209 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi) |
9b3b55a1 | 1210 | { |
726a989a | 1211 | stmt = gsi_stmt (gsi); |
9b3b55a1 | 1212 | |
0ca5af51 AO |
1213 | psi = gsi; |
1214 | gsi_prev (&psi); | |
1215 | ||
9b3b55a1 DN |
1216 | stats.total++; |
1217 | ||
996e1de5 RG |
1218 | /* We can mark a call to free as not necessary if the |
1219 | defining statement of its argument is an allocation | |
1220 | function and that is not necessary itself. */ | |
1221 | if (gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
1222 | { | |
1223 | tree ptr = gimple_call_arg (stmt, 0); | |
1224 | tree callee2; | |
1225 | gimple def_stmt; | |
1226 | if (TREE_CODE (ptr) != SSA_NAME) | |
1227 | continue; | |
1228 | def_stmt = SSA_NAME_DEF_STMT (ptr); | |
1229 | if (!is_gimple_call (def_stmt) | |
1230 | || gimple_plf (def_stmt, STMT_NECESSARY)) | |
1231 | continue; | |
1232 | callee2 = gimple_call_fndecl (def_stmt); | |
1233 | if (callee2 == NULL_TREE | |
1234 | || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL | |
1235 | || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC | |
1236 | && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC)) | |
1237 | continue; | |
1238 | gimple_set_plf (stmt, STMT_NECESSARY, false); | |
1239 | } | |
1240 | ||
726a989a RB |
1241 | /* If GSI is not necessary then remove it. */ |
1242 | if (!gimple_plf (stmt, STMT_NECESSARY)) | |
7665f023 | 1243 | { |
717f4048 AO |
1244 | if (!is_gimple_debug (stmt)) |
1245 | something_changed = true; | |
726a989a | 1246 | remove_dead_stmt (&gsi, bb); |
7665f023 | 1247 | } |
726a989a | 1248 | else if (is_gimple_call (stmt)) |
9b3b55a1 | 1249 | { |
f1749ec1 RG |
1250 | tree name = gimple_call_lhs (stmt); |
1251 | ||
1252 | notice_special_calls (stmt); | |
1253 | ||
1254 | /* When LHS of var = call (); is dead, simplify it into | |
1255 | call (); saving one operand. */ | |
1256 | if (name | |
1257 | && TREE_CODE (name) == SSA_NAME | |
d7c028c0 | 1258 | && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)) |
f1749ec1 RG |
1259 | /* Avoid doing so for allocation calls which we |
1260 | did not mark as necessary, it will confuse the | |
1261 | special logic we apply to malloc/free pair removal. */ | |
1262 | && (!(call = gimple_call_fndecl (stmt)) | |
1263 | || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL | |
1264 | || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC | |
1265 | && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC | |
1266 | && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA | |
1267 | && (DECL_FUNCTION_CODE (call) | |
1268 | != BUILT_IN_ALLOCA_WITH_ALIGN)))) | |
cf227303 | 1269 | { |
f1749ec1 RG |
1270 | something_changed = true; |
1271 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
cf227303 | 1272 | { |
f1749ec1 RG |
1273 | fprintf (dump_file, "Deleting LHS of call: "); |
1274 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
1275 | fprintf (dump_file, "\n"); | |
cf227303 | 1276 | } |
f1749ec1 RG |
1277 | |
1278 | gimple_call_set_lhs (stmt, NULL_TREE); | |
1279 | maybe_clean_or_replace_eh_stmt (stmt, stmt); | |
1280 | update_stmt (stmt); | |
1281 | release_ssa_name (name); | |
cf227303 | 1282 | } |
726a989a | 1283 | } |
9b3b55a1 DN |
1284 | } |
1285 | } | |
b5b8b0ac | 1286 | |
9771b263 | 1287 | h.release (); |
b5b8b0ac | 1288 | |
7351bcaa JH |
1289 | /* Since we don't track liveness of virtual PHI nodes, it is possible that we |
1290 | rendered some PHI nodes unreachable while they are still in use. | |
1291 | Mark them for renaming. */ | |
1292 | if (cfg_altered) | |
1293 | { | |
b5b8b0ac AO |
1294 | basic_block prev_bb; |
1295 | ||
7351bcaa | 1296 | find_unreachable_blocks (); |
b5b8b0ac AO |
1297 | |
1298 | /* Delete all unreachable basic blocks in reverse dominator order. */ | |
1299 | for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb) | |
7351bcaa | 1300 | { |
b5b8b0ac AO |
1301 | prev_bb = bb->prev_bb; |
1302 | ||
d7c028c0 | 1303 | if (!bitmap_bit_p (bb_contains_live_stmts, bb->index) |
a915ab00 | 1304 | || !(bb->flags & BB_REACHABLE)) |
7351bcaa JH |
1305 | { |
1306 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
ea057359 | 1307 | if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi)))) |
7351bcaa JH |
1308 | { |
1309 | bool found = false; | |
1310 | imm_use_iterator iter; | |
1311 | ||
1312 | FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi))) | |
1313 | { | |
1314 | if (!(gimple_bb (stmt)->flags & BB_REACHABLE)) | |
1315 | continue; | |
1316 | if (gimple_code (stmt) == GIMPLE_PHI | |
1317 | || gimple_plf (stmt, STMT_NECESSARY)) | |
1318 | { | |
1319 | found = true; | |
1320 | BREAK_FROM_IMM_USE_STMT (iter); | |
1321 | } | |
1322 | } | |
1323 | if (found) | |
1324 | mark_virtual_phi_result_for_renaming (gsi_stmt (gsi)); | |
1325 | } | |
b5b8b0ac | 1326 | |
a915ab00 | 1327 | if (!(bb->flags & BB_REACHABLE)) |
b5b8b0ac AO |
1328 | { |
1329 | /* Speed up the removal of blocks that don't | |
1330 | dominate others. Walking backwards, this should | |
1331 | be the common case. ??? Do we need to recompute | |
1332 | dominators because of cfg_altered? */ | |
1333 | if (!MAY_HAVE_DEBUG_STMTS | |
1334 | || !first_dom_son (CDI_DOMINATORS, bb)) | |
1335 | delete_basic_block (bb); | |
1336 | else | |
1337 | { | |
1338 | h = get_all_dominated_blocks (CDI_DOMINATORS, bb); | |
1339 | ||
9771b263 | 1340 | while (h.length ()) |
b5b8b0ac | 1341 | { |
9771b263 | 1342 | bb = h.pop (); |
b5b8b0ac AO |
1343 | prev_bb = bb->prev_bb; |
1344 | /* Rearrangements to the CFG may have failed | |
1345 | to update the dominators tree, so that | |
1346 | formerly-dominated blocks are now | |
1347 | otherwise reachable. */ | |
1348 | if (!!(bb->flags & BB_REACHABLE)) | |
1349 | continue; | |
1350 | delete_basic_block (bb); | |
1351 | } | |
1352 | ||
9771b263 | 1353 | h.release (); |
b5b8b0ac AO |
1354 | } |
1355 | } | |
7351bcaa JH |
1356 | } |
1357 | } | |
1358 | } | |
5006671f RG |
1359 | FOR_EACH_BB (bb) |
1360 | { | |
1361 | /* Remove dead PHI nodes. */ | |
1362 | something_changed |= remove_dead_phis (bb); | |
1363 | } | |
1364 | ||
7665f023 | 1365 | return something_changed; |
9b3b55a1 DN |
1366 | } |
1367 | ||
1368 | ||
6de9cd9a DN |
1369 | /* Print out removed statement statistics. */ |
1370 | ||
1371 | static void | |
1372 | print_stats (void) | |
1373 | { | |
01902653 | 1374 | float percg; |
6de9cd9a | 1375 | |
01902653 RG |
1376 | percg = ((float) stats.removed / (float) stats.total) * 100; |
1377 | fprintf (dump_file, "Removed %d of %d statements (%d%%)\n", | |
1378 | stats.removed, stats.total, (int) percg); | |
6de9cd9a | 1379 | |
01902653 RG |
1380 | if (stats.total_phis == 0) |
1381 | percg = 0; | |
1382 | else | |
1383 | percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100; | |
6de9cd9a | 1384 | |
01902653 RG |
1385 | fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n", |
1386 | stats.removed_phis, stats.total_phis, (int) percg); | |
6de9cd9a | 1387 | } |
726a989a | 1388 | |
6de9cd9a DN |
1389 | /* Initialization for this pass. Set up the used data structures. */ |
1390 | ||
1391 | static void | |
1392 | tree_dce_init (bool aggressive) | |
1393 | { | |
1394 | memset ((void *) &stats, 0, sizeof (stats)); | |
1395 | ||
1396 | if (aggressive) | |
1397 | { | |
6de9cd9a | 1398 | last_stmt_necessary = sbitmap_alloc (last_basic_block); |
f61e445a | 1399 | bitmap_clear (last_stmt_necessary); |
1fc41282 | 1400 | bb_contains_live_stmts = sbitmap_alloc (last_basic_block); |
f61e445a | 1401 | bitmap_clear (bb_contains_live_stmts); |
6de9cd9a DN |
1402 | } |
1403 | ||
95a3742c | 1404 | processed = sbitmap_alloc (num_ssa_names + 1); |
f61e445a | 1405 | bitmap_clear (processed); |
6de9cd9a | 1406 | |
9771b263 | 1407 | worklist.create (64); |
9da4058c | 1408 | cfg_altered = false; |
6de9cd9a DN |
1409 | } |
1410 | ||
1411 | /* Cleanup after this pass. */ | |
1412 | ||
1413 | static void | |
1414 | tree_dce_done (bool aggressive) | |
1415 | { | |
1416 | if (aggressive) | |
1417 | { | |
c8e9d8c3 | 1418 | delete cd; |
a28fee03 | 1419 | sbitmap_free (visited_control_parents); |
6de9cd9a | 1420 | sbitmap_free (last_stmt_necessary); |
1fc41282 JH |
1421 | sbitmap_free (bb_contains_live_stmts); |
1422 | bb_contains_live_stmts = NULL; | |
6de9cd9a DN |
1423 | } |
1424 | ||
1425 | sbitmap_free (processed); | |
906532aa | 1426 | |
9771b263 | 1427 | worklist.release (); |
6de9cd9a | 1428 | } |
726a989a | 1429 | |
6de9cd9a DN |
1430 | /* Main routine to eliminate dead code. |
1431 | ||
1432 | AGGRESSIVE controls the aggressiveness of the algorithm. | |
1433 | In conservative mode, we ignore control dependence and simply declare | |
1434 | all but the most trivially dead branches necessary. This mode is fast. | |
1435 | In aggressive mode, control dependences are taken into account, which | |
1436 | results in more dead code elimination, but at the cost of some time. | |
1437 | ||
1438 | FIXME: Aggressive mode before PRE doesn't work currently because | |
1439 | the dominance info is not invalidated after DCE1. This is | |
1440 | not an issue right now because we only run aggressive DCE | |
1441 | as the last tree SSA pass, but keep this in mind when you | |
1442 | start experimenting with pass ordering. */ | |
1443 | ||
7665f023 | 1444 | static unsigned int |
6de9cd9a DN |
1445 | perform_tree_ssa_dce (bool aggressive) |
1446 | { | |
7665f023 | 1447 | bool something_changed = 0; |
6de9cd9a | 1448 | |
5ac60b56 RG |
1449 | calculate_dominance_info (CDI_DOMINATORS); |
1450 | ||
7351bcaa JH |
1451 | /* Preheaders are needed for SCEV to work. |
1452 | Simple lateches and recorded exits improve chances that loop will | |
1453 | proved to be finite in testcases such as in loop-15.c and loop-24.c */ | |
1454 | if (aggressive) | |
1455 | loop_optimizer_init (LOOPS_NORMAL | |
1456 | | LOOPS_HAVE_RECORDED_EXITS); | |
1457 | ||
6de9cd9a DN |
1458 | tree_dce_init (aggressive); |
1459 | ||
1460 | if (aggressive) | |
1461 | { | |
1462 | /* Compute control dependence. */ | |
6de9cd9a | 1463 | calculate_dominance_info (CDI_POST_DOMINATORS); |
c8e9d8c3 | 1464 | cd = new control_dependences (create_edge_list ()); |
6de9cd9a | 1465 | |
a28fee03 | 1466 | visited_control_parents = sbitmap_alloc (last_basic_block); |
f61e445a | 1467 | bitmap_clear (visited_control_parents); |
a28fee03 | 1468 | |
6de9cd9a DN |
1469 | mark_dfs_back_edges (); |
1470 | } | |
1471 | ||
c8e9d8c3 | 1472 | find_obviously_necessary_stmts (aggressive); |
6de9cd9a | 1473 | |
7351bcaa JH |
1474 | if (aggressive) |
1475 | loop_optimizer_finalize (); | |
1476 | ||
5006671f RG |
1477 | longest_chain = 0; |
1478 | total_chain = 0; | |
a61f9cc0 | 1479 | nr_walks = 0; |
5006671f | 1480 | chain_ovfl = false; |
d9b99b4c | 1481 | visited = BITMAP_ALLOC (NULL); |
c8e9d8c3 | 1482 | propagate_necessity (aggressive); |
5006671f | 1483 | BITMAP_FREE (visited); |
bc300fec | 1484 | |
7665f023 JH |
1485 | something_changed |= eliminate_unnecessary_stmts (); |
1486 | something_changed |= cfg_altered; | |
6de9cd9a | 1487 | |
30251f7a ZD |
1488 | /* We do not update postdominators, so free them unconditionally. */ |
1489 | free_dominance_info (CDI_POST_DOMINATORS); | |
6de9cd9a | 1490 | |
9da4058c JL |
1491 | /* If we removed paths in the CFG, then we need to update |
1492 | dominators as well. I haven't investigated the possibility | |
1493 | of incrementally updating dominators. */ | |
1494 | if (cfg_altered) | |
1495 | free_dominance_info (CDI_DOMINATORS); | |
1496 | ||
01902653 RG |
1497 | statistics_counter_event (cfun, "Statements deleted", stats.removed); |
1498 | statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis); | |
1499 | ||
6de9cd9a | 1500 | /* Debugging dumps. */ |
01902653 | 1501 | if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS))) |
88a40e67 | 1502 | print_stats (); |
6de9cd9a DN |
1503 | |
1504 | tree_dce_done (aggressive); | |
960076d9 | 1505 | |
7665f023 | 1506 | if (something_changed) |
c0e50f72 RB |
1507 | return TODO_update_ssa | TODO_cleanup_cfg; |
1508 | return 0; | |
6de9cd9a DN |
1509 | } |
1510 | ||
1511 | /* Pass entry points. */ | |
c2924966 | 1512 | static unsigned int |
6de9cd9a DN |
1513 | tree_ssa_dce (void) |
1514 | { | |
7665f023 | 1515 | return perform_tree_ssa_dce (/*aggressive=*/false); |
6de9cd9a DN |
1516 | } |
1517 | ||
c2924966 | 1518 | static unsigned int |
49896738 RH |
1519 | tree_ssa_dce_loop (void) |
1520 | { | |
7665f023 JH |
1521 | unsigned int todo; |
1522 | todo = perform_tree_ssa_dce (/*aggressive=*/false); | |
1523 | if (todo) | |
1524 | { | |
1525 | free_numbers_of_iterations_estimates (); | |
1526 | scev_reset (); | |
1527 | } | |
1528 | return todo; | |
49896738 RH |
1529 | } |
1530 | ||
c2924966 | 1531 | static unsigned int |
6de9cd9a DN |
1532 | tree_ssa_cd_dce (void) |
1533 | { | |
7665f023 | 1534 | return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2); |
6de9cd9a DN |
1535 | } |
1536 | ||
1537 | static bool | |
1538 | gate_dce (void) | |
1539 | { | |
1540 | return flag_tree_dce != 0; | |
1541 | } | |
1542 | ||
namespace {

/* Pass descriptor for the conservative DCE pass.  The initializer is
   positional; field order must match struct pass_data.  */

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (ctxt_); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
1581 | ||
namespace {

/* Pass descriptor for the loop-pipeline DCE variant.  The initializer
   is positional; field order must match struct pass_data.  */

const pass_data pass_data_dce_loop =
{
  GIMPLE_PASS, /* type */
  "dceloop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dce_loop : public gimple_opt_pass
{
public:
  pass_dce_loop(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_dce_loop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce_loop (ctxt_); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_dce_loop (); }

}; // class pass_dce_loop

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_dce_loop (gcc::context *ctxt)
{
  return new pass_dce_loop (ctxt);
}
1620 | ||
namespace {

/* Pass descriptor for the "cddce" (control-dependence DCE) pass.
   Unlike the plain dce descriptors it also requests CFG verification
   (TODO_verify_flow), since the aggressive flavour may delete whole
   control structures.  NOTE: pass_data is initialized positionally,
   so the field order below must match the pass_data declaration
   exactly.  */
const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_ssa | TODO_verify_flow ), /* todo_flags_finish */
};

/* Control-dependence based dead code elimination.  Gated by -ftree-dce
   via gate_dce; the work is done by tree_ssa_cd_dce, which picks the
   aggressive flavour at -O2 and above.  */
class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (ctxt_); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace
1653 | ||
1654 | gimple_opt_pass * | |
1655 | make_pass_cd_dce (gcc::context *ctxt) | |
1656 | { | |
1657 | return new pass_cd_dce (ctxt); | |
1658 | } |