]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Dead code elimination pass for the GNU compiler. |
ddb555ed | 2 | Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 |
058dcc25 | 3 | Free Software Foundation, Inc. |
6de9cd9a DN |
4 | Contributed by Ben Elliston <bje@redhat.com> |
5 | and Andrew MacLeod <amacleod@redhat.com> | |
6 | Adapted to use control dependence by Steven Bosscher, SUSE Labs. | |
b8698a0f | 7 | |
6de9cd9a | 8 | This file is part of GCC. |
b8698a0f | 9 | |
6de9cd9a DN |
10 | GCC is free software; you can redistribute it and/or modify it |
11 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 12 | Free Software Foundation; either version 3, or (at your option) any |
6de9cd9a | 13 | later version. |
b8698a0f | 14 | |
6de9cd9a DN |
15 | GCC is distributed in the hope that it will be useful, but WITHOUT |
16 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
17 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
18 | for more details. | |
b8698a0f | 19 | |
6de9cd9a | 20 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
21 | along with GCC; see the file COPYING3. If not see |
22 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
23 | |
24 | /* Dead code elimination. | |
25 | ||
26 | References: | |
27 | ||
28 | Building an Optimizing Compiler, | |
29 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
30 | ||
31 | Advanced Compiler Design and Implementation, | |
32 | Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10. | |
33 | ||
34 | Dead-code elimination is the removal of statements which have no | |
35 | impact on the program's output. "Dead statements" have no impact | |
36 | on the program's output, while "necessary statements" may have | |
37 | impact on the output. | |
38 | ||
39 | The algorithm consists of three phases: | |
40 | 1. Marking as necessary all statements known to be necessary, | |
41 | e.g. most function calls, writing a value to memory, etc; | |
42 | 2. Propagating necessary statements, e.g., the statements | |
43 | giving values to operands in necessary statements; and | |
44 | 3. Removing dead statements. */ | |
45 | ||
46 | #include "config.h" | |
47 | #include "system.h" | |
48 | #include "coretypes.h" | |
49 | #include "tm.h" | |
6de9cd9a DN |
50 | |
51 | #include "tree.h" | |
cf835838 JM |
52 | #include "tree-pretty-print.h" |
53 | #include "gimple-pretty-print.h" | |
40013784 | 54 | #include "basic-block.h" |
6de9cd9a | 55 | #include "tree-flow.h" |
726a989a | 56 | #include "gimple.h" |
6de9cd9a DN |
57 | #include "tree-dump.h" |
58 | #include "tree-pass.h" | |
59 | #include "timevar.h" | |
60 | #include "flags.h" | |
49896738 | 61 | #include "cfgloop.h" |
a4176272 | 62 | #include "tree-scalar-evolution.h" |
726a989a | 63 | |
6de9cd9a DN |
/* Statistics gathered for the dump file: statements and PHI nodes
   seen in total, and how many of each were removed.  */
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

/* Pass-local per-statement flag recording that a statement has been
   proven necessary (i.e. must not be deleted).  */
#define STMT_NECESSARY GF_PLF_1

/* Worklist of statements whose necessity still has to be propagated
   to the statements feeding their operands.  */
static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  Indexed by SSA name version.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)	\
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,	\
			    (EDGE_NUMBER), (BI))
6de9cd9a | 113 | |
6de9cd9a | 114 | |
6de9cd9a DN |
115 | /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */ |
116 | static inline void | |
117 | set_control_dependence_map_bit (basic_block bb, int edge_index) | |
118 | { | |
119 | if (bb == ENTRY_BLOCK_PTR) | |
120 | return; | |
1e128c5f | 121 | gcc_assert (bb != EXIT_BLOCK_PTR); |
6de9cd9a DN |
122 | bitmap_set_bit (control_dependence_map[bb->index], edge_index); |
123 | } | |
124 | ||
/* Clear all control dependences recorded for block BB in
   control_dependence_map.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}
131 | ||
6de9cd9a | 132 | |
9b3b55a1 DN |
133 | /* Find the immediate postdominator PDOM of the specified basic block BLOCK. |
134 | This function is necessary because some blocks have negative numbers. */ | |
135 | ||
136 | static inline basic_block | |
137 | find_pdom (basic_block block) | |
6de9cd9a | 138 | { |
9b3b55a1 | 139 | gcc_assert (block != ENTRY_BLOCK_PTR); |
6de9cd9a | 140 | |
9b3b55a1 DN |
141 | if (block == EXIT_BLOCK_PTR) |
142 | return EXIT_BLOCK_PTR; | |
143 | else | |
144 | { | |
145 | basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block); | |
146 | if (! bb) | |
147 | return EXIT_BLOCK_PTR; | |
148 | return bb; | |
149 | } | |
6de9cd9a DN |
150 | } |
151 | ||
9b3b55a1 | 152 | |
6de9cd9a DN |
153 | /* Determine all blocks' control dependences on the given edge with edge_list |
154 | EL index EDGE_INDEX, ala Morgan, Section 3.6. */ | |
155 | ||
156 | static void | |
157 | find_control_dependence (struct edge_list *el, int edge_index) | |
158 | { | |
159 | basic_block current_block; | |
160 | basic_block ending_block; | |
161 | ||
1e128c5f | 162 | gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR); |
6de9cd9a DN |
163 | |
164 | if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR) | |
953ff289 | 165 | ending_block = single_succ (ENTRY_BLOCK_PTR); |
6de9cd9a DN |
166 | else |
167 | ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index)); | |
168 | ||
169 | for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index); | |
170 | current_block != ending_block && current_block != EXIT_BLOCK_PTR; | |
171 | current_block = find_pdom (current_block)) | |
172 | { | |
173 | edge e = INDEX_EDGE (el, edge_index); | |
174 | ||
175 | /* For abnormal edges, we don't make current_block control | |
176 | dependent because instructions that throw are always necessary | |
177 | anyway. */ | |
178 | if (e->flags & EDGE_ABNORMAL) | |
179 | continue; | |
180 | ||
181 | set_control_dependence_map_bit (current_block, edge_index); | |
182 | } | |
183 | } | |
184 | ||
6de9cd9a | 185 | |
/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int num_edges = NUM_EDGES (el);
  int i;

  for (i = 0; i < num_edges; ++i)
    find_control_dependence (el, i);
}
9b3b55a1 | 197 | |
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  /* Already marked; nothing more to do.  */
  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  /* Debug statements do not count as live content of a block.  */
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
222 | ||
/* Mark the statement defining operand OP as necessary and push it on
   the worklist, unless it was already processed.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      /* Already handled: its defining statement must already have been
	 marked necessary (or be a GIMPLE_NOP such as a default def).  */
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}
9b3b55a1 | 263 | |
6de9cd9a | 264 | |
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	/* Calls to allocation functions are not inherently necessary;
	   they become necessary only through uses of their result.  */
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls that
	   produce no result and have no side effects (i.e. const pure
	   functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      /* Simple gotos were lowered away by CFG construction; anything
	 left here is a computed goto and must be preserved.  */
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      /* Without control dependence information every control statement
	 must conservatively be kept.  */
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      /* A clobber of an SSA name is never necessary by itself.  */
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that may store to memory visible outside this function
     must be preserved.  */
  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
9b3b55a1 DN |
383 | |
384 | ||
ad06ee51 EB |
385 | /* Mark the last statement of BB as necessary. */ |
386 | ||
387 | static void | |
388 | mark_last_stmt_necessary (basic_block bb) | |
389 | { | |
390 | gimple stmt = last_stmt (bb); | |
391 | ||
392 | SET_BIT (last_stmt_necessary, bb->index); | |
393 | SET_BIT (bb_contains_live_stmts, bb->index); | |
394 | ||
395 | /* We actually mark the statement only if it is a control statement. */ | |
396 | if (stmt && is_ctrl_stmt (stmt)) | |
397 | mark_stmt_necessary (stmt, true); | |
398 | } | |
399 | ||
400 | ||
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
					bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  /* The entry block depends on nothing.  */
  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  /* Only record BB as fully handled when nothing was skipped; otherwise
     a later call without IGNORE_SELF must be able to redo the walk.  */
  if (!skipped)
    SET_BIT (visited_control_parents, bb->index);
}
436 | ||
437 | ||
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, el != NULL);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      /* Irreducible regions are not visited by FOR_EACH_LOOP below, so
	 keep their back edges alive explicitly.  */
      if (mark_irreducible_loops ())
	FOR_EACH_BB (bb)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, el, false);
		}
	  }

      /* Loops whose finiteness cannot be proven must keep their latch
	 control dependences so the loop is not deleted.  */
      FOR_EACH_LOOP (li, loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, el, false);
	  }
      scev_finalize ();
    }
}
6de9cd9a | 509 | |
6de9cd9a | 510 | |
5006671f RG |
511 | /* Return true if REF is based on an aliased base, otherwise false. */ |
512 | ||
513 | static bool | |
514 | ref_may_be_aliased (tree ref) | |
515 | { | |
b3421a06 | 516 | gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR); |
5006671f RG |
517 | while (handled_component_p (ref)) |
518 | ref = TREE_OPERAND (ref, 0); | |
70f34814 RG |
519 | if (TREE_CODE (ref) == MEM_REF |
520 | && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR) | |
521 | ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0); | |
5006671f RG |
522 | return !(DECL_P (ref) |
523 | && !may_be_aliased (ref)); | |
524 | } | |
525 | ||
/* Bitmap of VDEFs already visited by mark_all_reaching_defs_necessary.  */
static bitmap visited = NULL;
/* Statistics of the alias-walks performed so far; presumably used by the
   caller to decide when walking gets too expensive -- TODO confirm
   against propagate_necessity.  */
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
/* TRUE once we dropped back to the simple (less precise, cheaper)
   walking mode; see mark_all_reaching_defs_necessary_1.  */
static bool chain_ovfl = false;
531 | ||
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == refd->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      /* The store covers the whole possible extent of REF:
		 this is a kill.  */
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
591 | ||
592 | static void | |
593 | mark_aliased_reaching_defs_necessary (gimple stmt, tree ref) | |
594 | { | |
5006671f | 595 | unsigned int chain; |
42bc61e0 | 596 | ao_ref refd; |
5006671f | 597 | gcc_assert (!chain_ovfl); |
42bc61e0 RG |
598 | ao_ref_init (&refd, ref); |
599 | chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt), | |
5006671f | 600 | mark_aliased_reaching_defs_necessary_1, |
1415abc0 | 601 | gimple_bb (stmt), NULL); |
5006671f RG |
602 | if (chain > longest_chain) |
603 | longest_chain = chain; | |
604 | total_chain += chain; | |
a61f9cc0 | 605 | nr_walks++; |
5006671f RG |
606 | } |
607 | ||
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  /* Always keep walking; the caller wants every reaching may-def seen.  */
  return false;
}
663 | ||
/* Mark all may-defs reaching the vuse of STMT necessary, caching
   already-seen VDEFs in the global VISITED bitmap across calls.  */

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
670 | ||
7351bcaa JH |
671 | /* Return true for PHI nodes with one or identical arguments |
672 | can be removed. */ | |
673 | static bool | |
674 | degenerate_phi_p (gimple phi) | |
675 | { | |
676 | unsigned int i; | |
677 | tree op = gimple_phi_arg_def (phi, 0); | |
678 | for (i = 1; i < gimple_phi_num_args (phi); i++) | |
679 | if (gimple_phi_arg_def (phi, i) != op) | |
680 | return false; | |
681 | return true; | |
682 | } | |
683 | ||
9b3b55a1 DN |
684 | /* Propagate necessity using the operands of necessary statements. |
685 | Process the uses on each statement in the worklist, and add all | |
686 | feeding statements which contribute to the calculation of this | |
b8698a0f | 687 | value to the worklist. |
6de9cd9a DN |
688 | |
689 | In conservative mode, EL is NULL. */ | |
690 | ||
691 | static void | |
692 | propagate_necessity (struct edge_list *el) | |
693 | { | |
726a989a | 694 | gimple stmt; |
b8698a0f | 695 | bool aggressive = (el ? true : false); |
6de9cd9a DN |
696 | |
697 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
698 | fprintf (dump_file, "\nProcessing worklist:\n"); | |
699 | ||
726a989a | 700 | while (VEC_length (gimple, worklist) > 0) |
6de9cd9a | 701 | { |
9b3b55a1 | 702 | /* Take STMT from worklist. */ |
726a989a | 703 | stmt = VEC_pop (gimple, worklist); |
6de9cd9a DN |
704 | |
705 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
706 | { | |
707 | fprintf (dump_file, "processing: "); | |
726a989a | 708 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
709 | fprintf (dump_file, "\n"); |
710 | } | |
711 | ||
712 | if (aggressive) | |
713 | { | |
ad06ee51 EB |
714 | /* Mark the last statement of the basic blocks on which the block |
715 | containing STMT is control dependent, but only if we haven't | |
6de9cd9a | 716 | already done so. */ |
726a989a | 717 | basic_block bb = gimple_bb (stmt); |
a28fee03 | 718 | if (bb != ENTRY_BLOCK_PTR |
ad06ee51 | 719 | && !TEST_BIT (visited_control_parents, bb->index)) |
b1a0b3b4 | 720 | mark_control_dependent_edges_necessary (bb, el, false); |
6de9cd9a DN |
721 | } |
722 | ||
5006671f RG |
723 | if (gimple_code (stmt) == GIMPLE_PHI |
724 | /* We do not process virtual PHI nodes nor do we track their | |
725 | necessity. */ | |
726 | && is_gimple_reg (gimple_phi_result (stmt))) | |
6de9cd9a DN |
727 | { |
728 | /* PHI nodes are somewhat special in that each PHI alternative has | |
729 | data and control dependencies. All the statements feeding the | |
730 | PHI node's arguments are always necessary. In aggressive mode, | |
731 | we also consider the control dependent edges leading to the | |
732 | predecessor block associated with each PHI alternative as | |
733 | necessary. */ | |
726a989a | 734 | size_t k; |
9b3b55a1 | 735 | |
726a989a | 736 | for (k = 0; k < gimple_phi_num_args (stmt); k++) |
6de9cd9a | 737 | { |
9b3b55a1 | 738 | tree arg = PHI_ARG_DEF (stmt, k); |
6de9cd9a | 739 | if (TREE_CODE (arg) == SSA_NAME) |
38635499 | 740 | mark_operand_necessary (arg); |
6de9cd9a DN |
741 | } |
742 | ||
b1a0b3b4 JH |
743 | /* For PHI operands it matters from where the control flow arrives |
744 | to the BB. Consider the following example: | |
745 | ||
746 | a=exp1; | |
747 | b=exp2; | |
748 | if (test) | |
749 | ; | |
750 | else | |
751 | ; | |
752 | c=PHI(a,b) | |
753 | ||
754 | We need to mark control dependence of the empty basic blocks, since they | |
755 | contains computation of PHI operands. | |
756 | ||
757 | Doing so is too restrictive in the case the predecestor block is in | |
758 | the loop. Consider: | |
759 | ||
760 | if (b) | |
761 | { | |
762 | int i; | |
763 | for (i = 0; i<1000; ++i) | |
764 | ; | |
765 | j = 0; | |
766 | } | |
767 | return j; | |
768 | ||
769 | There is PHI for J in the BB containing return statement. | |
770 | In this case the control dependence of predecestor block (that is | |
771 | within the empty loop) also contains the block determining number | |
772 | of iterations of the block that would prevent removing of empty | |
773 | loop in this case. | |
774 | ||
775 | This scenario can be avoided by splitting critical edges. | |
776 | To save the critical edge splitting pass we identify how the control | |
777 | dependence would look like if the edge was split. | |
778 | ||
779 | Consider the modified CFG created from current CFG by splitting | |
780 | edge B->C. In the postdominance tree of modified CFG, C' is | |
781 | always child of C. There are two cases how chlids of C' can look | |
782 | like: | |
783 | ||
784 | 1) C' is leaf | |
785 | ||
786 | In this case the only basic block C' is control dependent on is B. | |
787 | ||
788 | 2) C' has single child that is B | |
789 | ||
790 | In this case control dependence of C' is same as control | |
791 | dependence of B in original CFG except for block B itself. | |
792 | (since C' postdominate B in modified CFG) | |
793 | ||
794 | Now how to decide what case happens? There are two basic options: | |
795 | ||
796 | a) C postdominate B. Then C immediately postdominate B and | |
797 | case 2 happens iff there is no other way from B to C except | |
798 | the edge B->C. | |
799 | ||
800 | There is other way from B to C iff there is succesor of B that | |
801 | is not postdominated by B. Testing this condition is somewhat | |
802 | expensive, because we need to iterate all succesors of B. | |
803 | We are safe to assume that this does not happen: we will mark B | |
804 | as needed when processing the other path from B to C that is | |
805 | conrol dependent on B and marking control dependencies of B | |
806 | itself is harmless because they will be processed anyway after | |
807 | processing control statement in B. | |
808 | ||
809 | b) C does not postdominate B. Always case 1 happens since there is | |
810 | path from C to exit that does not go through B and thus also C'. */ | |
811 | ||
7351bcaa | 812 | if (aggressive && !degenerate_phi_p (stmt)) |
6de9cd9a | 813 | { |
726a989a | 814 | for (k = 0; k < gimple_phi_num_args (stmt); k++) |
6de9cd9a | 815 | { |
726a989a | 816 | basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src; |
b1a0b3b4 JH |
817 | |
818 | if (gimple_bb (stmt) | |
819 | != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb)) | |
6de9cd9a | 820 | { |
b1a0b3b4 | 821 | if (!TEST_BIT (last_stmt_necessary, arg_bb->index)) |
ad06ee51 | 822 | mark_last_stmt_necessary (arg_bb); |
6de9cd9a | 823 | } |
b1a0b3b4 | 824 | else if (arg_bb != ENTRY_BLOCK_PTR |
ad06ee51 EB |
825 | && !TEST_BIT (visited_control_parents, |
826 | arg_bb->index)) | |
b1a0b3b4 | 827 | mark_control_dependent_edges_necessary (arg_bb, el, true); |
6de9cd9a DN |
828 | } |
829 | } | |
830 | } | |
831 | else | |
832 | { | |
833 | /* Propagate through the operands. Examine all the USE, VUSE and | |
b8698a0f | 834 | VDEF operands in this statement. Mark all the statements |
5006671f | 835 | which feed this statement's uses as necessary. */ |
38635499 DN |
836 | ssa_op_iter iter; |
837 | tree use; | |
4c124b4c | 838 | |
996e1de5 RG |
839 | /* If this is a call to free which is directly fed by an |
840 | allocation function do not mark that necessary through | |
841 | processing the argument. */ | |
842 | if (gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
843 | { | |
844 | tree ptr = gimple_call_arg (stmt, 0); | |
845 | gimple def_stmt; | |
846 | tree def_callee; | |
847 | /* If the pointer we free is defined by an allocation | |
848 | function do not add the call to the worklist. */ | |
849 | if (TREE_CODE (ptr) == SSA_NAME | |
850 | && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr)) | |
851 | && (def_callee = gimple_call_fndecl (def_stmt)) | |
852 | && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL | |
853 | && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC | |
854 | || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC)) | |
855 | continue; | |
856 | } | |
857 | ||
5006671f | 858 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE) |
38635499 | 859 | mark_operand_necessary (use); |
5006671f RG |
860 | |
861 | use = gimple_vuse (stmt); | |
862 | if (!use) | |
863 | continue; | |
864 | ||
865 | /* If we dropped to simple mode make all immediately | |
866 | reachable definitions necessary. */ | |
867 | if (chain_ovfl) | |
868 | { | |
869 | mark_all_reaching_defs_necessary (stmt); | |
870 | continue; | |
871 | } | |
872 | ||
873 | /* For statements that may load from memory (have a VUSE) we | |
874 | have to mark all reaching (may-)definitions as necessary. | |
875 | We partition this task into two cases: | |
876 | 1) explicit loads based on decls that are not aliased | |
877 | 2) implicit loads (like calls) and explicit loads not | |
878 | based on decls that are not aliased (like indirect | |
879 | references or loads from globals) | |
880 | For 1) we mark all reaching may-defs as necessary, stopping | |
881 | at dominating kills. For 2) we want to mark all dominating | |
882 | references necessary, but non-aliased ones which we handle | |
67635176 RG |
883 | in 1). By keeping a global visited bitmap for references |
884 | we walk for 2) we avoid quadratic behavior for those. */ | |
5006671f RG |
885 | |
886 | if (is_gimple_call (stmt)) | |
887 | { | |
14c41b9b | 888 | tree callee = gimple_call_fndecl (stmt); |
5006671f RG |
889 | unsigned i; |
890 | ||
14c41b9b RG |
891 | /* Calls to functions that are merely acting as barriers |
892 | or that only store to memory do not make any previous | |
893 | stores necessary. */ | |
894 | if (callee != NULL_TREE | |
895 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
896 | && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET | |
36dc1a88 | 897 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK |
14c41b9b | 898 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC |
36dc1a88 | 899 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC |
ab4472fa | 900 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE |
df2f6100 | 901 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END |
ab4472fa | 902 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA |
13e49da9 TV |
903 | || (DECL_FUNCTION_CODE (callee) |
904 | == BUILT_IN_ALLOCA_WITH_ALIGN) | |
ab4472fa | 905 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE |
45d439ac JJ |
906 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE |
907 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED)) | |
14c41b9b RG |
908 | continue; |
909 | ||
5006671f | 910 | /* Calls implicitly load from memory, their arguments |
67635176 | 911 | in addition may explicitly perform memory loads. */ |
5006671f RG |
912 | mark_all_reaching_defs_necessary (stmt); |
913 | for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
914 | { | |
915 | tree arg = gimple_call_arg (stmt, i); | |
916 | if (TREE_CODE (arg) == SSA_NAME | |
917 | || is_gimple_min_invariant (arg)) | |
918 | continue; | |
b3421a06 TV |
919 | if (TREE_CODE (arg) == WITH_SIZE_EXPR) |
920 | arg = TREE_OPERAND (arg, 0); | |
5006671f RG |
921 | if (!ref_may_be_aliased (arg)) |
922 | mark_aliased_reaching_defs_necessary (stmt, arg); | |
923 | } | |
924 | } | |
925 | else if (gimple_assign_single_p (stmt)) | |
926 | { | |
67635176 | 927 | tree rhs; |
5006671f RG |
928 | /* If this is a load mark things necessary. */ |
929 | rhs = gimple_assign_rhs1 (stmt); | |
930 | if (TREE_CODE (rhs) != SSA_NAME | |
47598145 MM |
931 | && !is_gimple_min_invariant (rhs) |
932 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
5006671f RG |
933 | { |
934 | if (!ref_may_be_aliased (rhs)) | |
935 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
936 | else | |
47598145 | 937 | mark_all_reaching_defs_necessary (stmt); |
5006671f | 938 | } |
5006671f RG |
939 | } |
940 | else if (gimple_code (stmt) == GIMPLE_RETURN) | |
941 | { | |
942 | tree rhs = gimple_return_retval (stmt); | |
943 | /* A return statement may perform a load. */ | |
e106efc7 RG |
944 | if (rhs |
945 | && TREE_CODE (rhs) != SSA_NAME | |
47598145 MM |
946 | && !is_gimple_min_invariant (rhs) |
947 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
5006671f RG |
948 | { |
949 | if (!ref_may_be_aliased (rhs)) | |
950 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
951 | else | |
952 | mark_all_reaching_defs_necessary (stmt); | |
953 | } | |
954 | } | |
955 | else if (gimple_code (stmt) == GIMPLE_ASM) | |
956 | { | |
957 | unsigned i; | |
958 | mark_all_reaching_defs_necessary (stmt); | |
959 | /* Inputs may perform loads. */ | |
960 | for (i = 0; i < gimple_asm_ninputs (stmt); ++i) | |
961 | { | |
962 | tree op = TREE_VALUE (gimple_asm_input_op (stmt, i)); | |
963 | if (TREE_CODE (op) != SSA_NAME | |
964 | && !is_gimple_min_invariant (op) | |
47598145 | 965 | && TREE_CODE (op) != CONSTRUCTOR |
5006671f RG |
966 | && !ref_may_be_aliased (op)) |
967 | mark_aliased_reaching_defs_necessary (stmt, op); | |
968 | } | |
969 | } | |
1d4fb493 RH |
970 | else if (gimple_code (stmt) == GIMPLE_TRANSACTION) |
971 | { | |
972 | /* The beginning of a transaction is a memory barrier. */ | |
973 | /* ??? If we were really cool, we'd only be a barrier | |
974 | for the memories touched within the transaction. */ | |
975 | mark_all_reaching_defs_necessary (stmt); | |
976 | } | |
5006671f RG |
977 | else |
978 | gcc_unreachable (); | |
979 | ||
980 | /* If we over-used our alias oracle budget drop to simple | |
a61f9cc0 RG |
981 | mode. The cost metric allows quadratic behavior |
982 | (number of uses times number of may-defs queries) up to | |
983 | a constant maximal number of queries and after that falls back to | |
5006671f | 984 | super-linear complexity. */ |
a61f9cc0 RG |
985 | if (/* Constant but quadratic for small functions. */ |
986 | total_chain > 128 * 128 | |
987 | /* Linear in the number of may-defs. */ | |
988 | && total_chain > 32 * longest_chain | |
989 | /* Linear in the number of uses. */ | |
990 | && total_chain > nr_walks * 32) | |
5006671f RG |
991 | { |
992 | chain_ovfl = true; | |
993 | if (visited) | |
994 | bitmap_clear (visited); | |
995 | } | |
6de9cd9a DN |
996 | } |
997 | } | |
998 | } | |
52328bf6 | 999 | |
266fbb79 | 1000 | /* Replace all uses of NAME by underlying variable and mark it |
1fc41282 JH |
1001 | for renaming. */ |
1002 | ||
1d65f45c | 1003 | void |
266fbb79 | 1004 | mark_virtual_operand_for_renaming (tree name) |
1fc41282 JH |
1005 | { |
1006 | bool used = false; | |
1007 | imm_use_iterator iter; | |
1008 | use_operand_p use_p; | |
1009 | gimple stmt; | |
266fbb79 TV |
1010 | tree name_var; |
1011 | ||
1012 | name_var = SSA_NAME_VAR (name); | |
1013 | FOR_EACH_IMM_USE_STMT (stmt, iter, name) | |
1014 | { | |
1015 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
1016 | SET_USE (use_p, name_var); | |
1017 | update_stmt (stmt); | |
1018 | used = true; | |
1019 | } | |
1020 | if (used) | |
1021 | mark_sym_for_renaming (name_var); | |
1022 | } | |
1d65f45c | 1023 | |
266fbb79 TV |
1024 | /* Replace all uses of result of PHI by underlying variable and mark it |
1025 | for renaming. */ | |
1026 | ||
1027 | void | |
1028 | mark_virtual_phi_result_for_renaming (gimple phi) | |
1029 | { | |
1fc41282 JH |
1030 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1031 | { | |
1032 | fprintf (dump_file, "Marking result for renaming : "); | |
1033 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); | |
1034 | fprintf (dump_file, "\n"); | |
1035 | } | |
1d65f45c | 1036 | |
266fbb79 | 1037 | mark_virtual_operand_for_renaming (gimple_phi_result (phi)); |
1fc41282 | 1038 | } |
52328bf6 | 1039 | |
266fbb79 | 1040 | |
6de9cd9a DN |
1041 | /* Remove dead PHI nodes from block BB. */ |
1042 | ||
7665f023 | 1043 | static bool |
6de9cd9a DN |
1044 | remove_dead_phis (basic_block bb) |
1045 | { | |
7665f023 | 1046 | bool something_changed = false; |
726a989a RB |
1047 | gimple phi; |
1048 | gimple_stmt_iterator gsi; | |
6de9cd9a | 1049 | |
355a7673 | 1050 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);) |
6de9cd9a DN |
1051 | { |
1052 | stats.total_phis++; | |
726a989a | 1053 | phi = gsi_stmt (gsi); |
6de9cd9a | 1054 | |
5006671f RG |
1055 | /* We do not track necessity of virtual PHI nodes. Instead do |
1056 | very simple dead PHI removal here. */ | |
1057 | if (!is_gimple_reg (gimple_phi_result (phi))) | |
1058 | { | |
5006671f RG |
1059 | /* Virtual PHI nodes with one or identical arguments |
1060 | can be removed. */ | |
7351bcaa | 1061 | if (degenerate_phi_p (phi)) |
5006671f RG |
1062 | { |
1063 | tree vdef = gimple_phi_result (phi); | |
7351bcaa JH |
1064 | tree vuse = gimple_phi_arg_def (phi, 0); |
1065 | ||
5006671f RG |
1066 | use_operand_p use_p; |
1067 | imm_use_iterator iter; | |
1068 | gimple use_stmt; | |
1069 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef) | |
1070 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
1071 | SET_USE (use_p, vuse); | |
eab09a51 JH |
1072 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef) |
1073 | && TREE_CODE (vuse) == SSA_NAME) | |
5006671f RG |
1074 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1; |
1075 | } | |
1076 | else | |
1077 | gimple_set_plf (phi, STMT_NECESSARY, true); | |
1078 | } | |
1079 | ||
726a989a | 1080 | if (!gimple_plf (phi, STMT_NECESSARY)) |
6de9cd9a | 1081 | { |
7665f023 | 1082 | something_changed = true; |
6de9cd9a DN |
1083 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1084 | { | |
1085 | fprintf (dump_file, "Deleting : "); | |
726a989a | 1086 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); |
6de9cd9a DN |
1087 | fprintf (dump_file, "\n"); |
1088 | } | |
1089 | ||
726a989a | 1090 | remove_phi_node (&gsi, true); |
6de9cd9a | 1091 | stats.removed_phis++; |
5006671f | 1092 | continue; |
6de9cd9a | 1093 | } |
5006671f RG |
1094 | |
1095 | gsi_next (&gsi); | |
6de9cd9a | 1096 | } |
7665f023 | 1097 | return something_changed; |
6de9cd9a | 1098 | } |
9b3b55a1 | 1099 | |
1fc41282 JH |
1100 | /* Forward edge E to respective POST_DOM_BB and update PHIs. */ |
1101 | ||
1102 | static edge | |
1103 | forward_edge_to_pdom (edge e, basic_block post_dom_bb) | |
1104 | { | |
1105 | gimple_stmt_iterator gsi; | |
7351bcaa | 1106 | edge e2 = NULL; |
1fc41282 JH |
1107 | edge_iterator ei; |
1108 | ||
1109 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1110 | fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index, | |
1111 | e->dest->index, post_dom_bb->index); | |
1112 | ||
1113 | e2 = redirect_edge_and_branch (e, post_dom_bb); | |
1114 | cfg_altered = true; | |
1115 | ||
1116 | /* If edge was already around, no updating is neccesary. */ | |
1117 | if (e2 != e) | |
1118 | return e2; | |
1119 | ||
8eacd016 | 1120 | if (!gimple_seq_empty_p (phi_nodes (post_dom_bb))) |
1fc41282 JH |
1121 | { |
1122 | /* We are sure that for every live PHI we are seeing control dependent BB. | |
e9a8afaa | 1123 | This means that we can pick any edge to duplicate PHI args from. */ |
1fc41282 | 1124 | FOR_EACH_EDGE (e2, ei, post_dom_bb->preds) |
e9a8afaa | 1125 | if (e2 != e) |
1fc41282 JH |
1126 | break; |
1127 | for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);) | |
1128 | { | |
1129 | gimple phi = gsi_stmt (gsi); | |
7351bcaa | 1130 | tree op; |
f5045c96 | 1131 | source_location locus; |
1fc41282 | 1132 | |
e9a8afaa RG |
1133 | /* PHIs for virtuals have no control dependency relation on them. |
1134 | We are lost here and must force renaming of the symbol. */ | |
1fc41282 JH |
1135 | if (!is_gimple_reg (gimple_phi_result (phi))) |
1136 | { | |
1137 | mark_virtual_phi_result_for_renaming (phi); | |
1138 | remove_phi_node (&gsi, true); | |
1139 | continue; | |
1140 | } | |
e9a8afaa RG |
1141 | |
1142 | /* Dead PHI do not imply control dependency. */ | |
1143 | if (!gimple_plf (phi, STMT_NECESSARY)) | |
f5045c96 | 1144 | { |
e9a8afaa RG |
1145 | gsi_next (&gsi); |
1146 | continue; | |
f5045c96 | 1147 | } |
e9a8afaa RG |
1148 | |
1149 | op = gimple_phi_arg_def (phi, e2->dest_idx); | |
1150 | locus = gimple_phi_arg_location (phi, e2->dest_idx); | |
9e227d60 | 1151 | add_phi_arg (phi, op, e, locus); |
e9a8afaa RG |
1152 | /* The resulting PHI if not dead can only be degenerate. */ |
1153 | gcc_assert (degenerate_phi_p (phi)); | |
1fc41282 JH |
1154 | gsi_next (&gsi); |
1155 | } | |
1156 | } | |
1157 | return e; | |
1158 | } | |
9b3b55a1 | 1159 | |
206048bd | 1160 | /* Remove dead statement pointed to by iterator I. Receives the basic block BB |
6de9cd9a DN |
1161 | containing I so that we don't have to look it up. */ |
1162 | ||
1163 | static void | |
726a989a | 1164 | remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb) |
6de9cd9a | 1165 | { |
726a989a | 1166 | gimple stmt = gsi_stmt (*i); |
6de9cd9a DN |
1167 | |
1168 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1169 | { | |
1170 | fprintf (dump_file, "Deleting : "); | |
726a989a | 1171 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
1172 | fprintf (dump_file, "\n"); |
1173 | } | |
1174 | ||
1175 | stats.removed++; | |
1176 | ||
1177 | /* If we have determined that a conditional branch statement contributes | |
1178 | nothing to the program, then we not only remove it, but we also change | |
1179 | the flow graph so that the current block will simply fall-thru to its | |
1180 | immediate post-dominator. The blocks we are circumventing will be | |
6fc0bb99 | 1181 | removed by cleanup_tree_cfg if this change in the flow graph makes them |
6de9cd9a | 1182 | unreachable. */ |
726a989a | 1183 | if (is_ctrl_stmt (stmt)) |
6de9cd9a DN |
1184 | { |
1185 | basic_block post_dom_bb; | |
1fc41282 JH |
1186 | edge e, e2; |
1187 | edge_iterator ei; | |
e18d4a19 | 1188 | |
e9a8afaa | 1189 | post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb); |
0a180c0e | 1190 | |
1fc41282 | 1191 | e = find_edge (bb, post_dom_bb); |
0a180c0e | 1192 | |
1fc41282 JH |
1193 | /* If edge is already there, try to use it. This avoids need to update |
1194 | PHI nodes. Also watch for cases where post dominator does not exists | |
1195 | or is exit block. These can happen for infinite loops as we create | |
1196 | fake edges in the dominator tree. */ | |
1197 | if (e) | |
1198 | ; | |
1199 | else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR) | |
1200 | e = EDGE_SUCC (bb, 0); | |
e18d4a19 | 1201 | else |
1fc41282 JH |
1202 | e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb); |
1203 | gcc_assert (e); | |
1204 | e->probability = REG_BR_PROB_BASE; | |
1205 | e->count = bb->count; | |
6de9cd9a DN |
1206 | |
1207 | /* The edge is no longer associated with a conditional, so it does | |
1208 | not have TRUE/FALSE flags. */ | |
1fc41282 | 1209 | e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE); |
6de9cd9a | 1210 | |
0a180c0e | 1211 | /* The lone outgoing edge from BB will be a fallthru edge. */ |
1fc41282 JH |
1212 | e->flags |= EDGE_FALLTHRU; |
1213 | ||
1214 | /* Remove the remaining outgoing edges. */ | |
1215 | for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); ) | |
1216 | if (e != e2) | |
1217 | { | |
1218 | cfg_altered = true; | |
1219 | remove_edge (e2); | |
1220 | } | |
1221 | else | |
1222 | ei_next (&ei); | |
ec8c1492 JJ |
1223 | } |
1224 | ||
1225 | /* If this is a store into a variable that is being optimized away, | |
1226 | add a debug bind stmt if possible. */ | |
1227 | if (MAY_HAVE_DEBUG_STMTS | |
1228 | && gimple_assign_single_p (stmt) | |
1229 | && is_gimple_val (gimple_assign_rhs1 (stmt))) | |
1230 | { | |
1231 | tree lhs = gimple_assign_lhs (stmt); | |
1232 | if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL) | |
1233 | && !DECL_IGNORED_P (lhs) | |
1234 | && is_gimple_reg_type (TREE_TYPE (lhs)) | |
1235 | && !is_global_var (lhs) | |
1236 | && !DECL_HAS_VALUE_EXPR_P (lhs)) | |
1237 | { | |
1238 | tree rhs = gimple_assign_rhs1 (stmt); | |
1239 | gimple note | |
1240 | = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt); | |
1241 | gsi_insert_after (i, note, GSI_SAME_STMT); | |
1242 | } | |
6de9cd9a | 1243 | } |
5006671f RG |
1244 | |
1245 | unlink_stmt_vdef (stmt); | |
b8698a0f L |
1246 | gsi_remove (i, true); |
1247 | release_defs (stmt); | |
6de9cd9a | 1248 | } |
9b3b55a1 | 1249 | |
9b3b55a1 DN |
1250 | /* Eliminate unnecessary statements. Any instruction not marked as necessary |
1251 | contributes nothing to the program, and can be deleted. */ | |
1252 | ||
7665f023 | 1253 | static bool |
9b3b55a1 DN |
1254 | eliminate_unnecessary_stmts (void) |
1255 | { | |
7665f023 | 1256 | bool something_changed = false; |
9b3b55a1 | 1257 | basic_block bb; |
0ca5af51 | 1258 | gimple_stmt_iterator gsi, psi; |
726a989a RB |
1259 | gimple stmt; |
1260 | tree call; | |
b5b8b0ac | 1261 | VEC (basic_block, heap) *h; |
9b3b55a1 DN |
1262 | |
1263 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1264 | fprintf (dump_file, "\nEliminating unnecessary statements:\n"); | |
1265 | ||
1266 | clear_special_calls (); | |
9b3b55a1 | 1267 | |
b5b8b0ac AO |
1268 | /* Walking basic blocks and statements in reverse order avoids |
1269 | releasing SSA names before any other DEFs that refer to them are | |
1270 | released. This helps avoid loss of debug information, as we get | |
1271 | a chance to propagate all RHSs of removed SSAs into debug uses, | |
1272 | rather than only the latest ones. E.g., consider: | |
1273 | ||
1274 | x_3 = y_1 + z_2; | |
1275 | a_5 = x_3 - b_4; | |
1276 | # DEBUG a => a_5 | |
1277 | ||
1278 | If we were to release x_3 before a_5, when we reached a_5 and | |
1279 | tried to substitute it into the debug stmt, we'd see x_3 there, | |
1280 | but x_3's DEF, type, etc would have already been disconnected. | |
1281 | By going backwards, the debug stmt first changes to: | |
1282 | ||
1283 | # DEBUG a => x_3 - b_4 | |
1284 | ||
1285 | and then to: | |
1286 | ||
1287 | # DEBUG a => y_1 + z_2 - b_4 | |
1288 | ||
1289 | as desired. */ | |
1290 | gcc_assert (dom_info_available_p (CDI_DOMINATORS)); | |
1291 | h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR)); | |
1292 | ||
1293 | while (VEC_length (basic_block, h)) | |
9b3b55a1 | 1294 | { |
b5b8b0ac AO |
1295 | bb = VEC_pop (basic_block, h); |
1296 | ||
9b3b55a1 | 1297 | /* Remove dead statements. */ |
0ca5af51 | 1298 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi) |
9b3b55a1 | 1299 | { |
726a989a | 1300 | stmt = gsi_stmt (gsi); |
9b3b55a1 | 1301 | |
0ca5af51 AO |
1302 | psi = gsi; |
1303 | gsi_prev (&psi); | |
1304 | ||
9b3b55a1 DN |
1305 | stats.total++; |
1306 | ||
996e1de5 RG |
1307 | /* We can mark a call to free as not necessary if the |
1308 | defining statement of its argument is an allocation | |
1309 | function and that is not necessary itself. */ | |
1310 | if (gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
1311 | { | |
1312 | tree ptr = gimple_call_arg (stmt, 0); | |
1313 | tree callee2; | |
1314 | gimple def_stmt; | |
1315 | if (TREE_CODE (ptr) != SSA_NAME) | |
1316 | continue; | |
1317 | def_stmt = SSA_NAME_DEF_STMT (ptr); | |
1318 | if (!is_gimple_call (def_stmt) | |
1319 | || gimple_plf (def_stmt, STMT_NECESSARY)) | |
1320 | continue; | |
1321 | callee2 = gimple_call_fndecl (def_stmt); | |
1322 | if (callee2 == NULL_TREE | |
1323 | || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL | |
1324 | || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC | |
1325 | && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC)) | |
1326 | continue; | |
1327 | gimple_set_plf (stmt, STMT_NECESSARY, false); | |
1328 | } | |
1329 | ||
726a989a RB |
1330 | /* If GSI is not necessary then remove it. */ |
1331 | if (!gimple_plf (stmt, STMT_NECESSARY)) | |
7665f023 | 1332 | { |
717f4048 AO |
1333 | if (!is_gimple_debug (stmt)) |
1334 | something_changed = true; | |
726a989a | 1335 | remove_dead_stmt (&gsi, bb); |
7665f023 | 1336 | } |
726a989a | 1337 | else if (is_gimple_call (stmt)) |
9b3b55a1 | 1338 | { |
f1749ec1 RG |
1339 | tree name = gimple_call_lhs (stmt); |
1340 | ||
1341 | notice_special_calls (stmt); | |
1342 | ||
1343 | /* When LHS of var = call (); is dead, simplify it into | |
1344 | call (); saving one operand. */ | |
1345 | if (name | |
1346 | && TREE_CODE (name) == SSA_NAME | |
1347 | && !TEST_BIT (processed, SSA_NAME_VERSION (name)) | |
1348 | /* Avoid doing so for allocation calls which we | |
1349 | did not mark as necessary, it will confuse the | |
1350 | special logic we apply to malloc/free pair removal. */ | |
1351 | && (!(call = gimple_call_fndecl (stmt)) | |
1352 | || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL | |
1353 | || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC | |
1354 | && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC | |
1355 | && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA | |
1356 | && (DECL_FUNCTION_CODE (call) | |
1357 | != BUILT_IN_ALLOCA_WITH_ALIGN)))) | |
cf227303 | 1358 | { |
f1749ec1 RG |
1359 | something_changed = true; |
1360 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
cf227303 | 1361 | { |
f1749ec1 RG |
1362 | fprintf (dump_file, "Deleting LHS of call: "); |
1363 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
1364 | fprintf (dump_file, "\n"); | |
cf227303 | 1365 | } |
f1749ec1 RG |
1366 | |
1367 | gimple_call_set_lhs (stmt, NULL_TREE); | |
1368 | maybe_clean_or_replace_eh_stmt (stmt, stmt); | |
1369 | update_stmt (stmt); | |
1370 | release_ssa_name (name); | |
cf227303 | 1371 | } |
726a989a | 1372 | } |
9b3b55a1 DN |
1373 | } |
1374 | } | |
b5b8b0ac AO |
1375 | |
1376 | VEC_free (basic_block, heap, h); | |
1377 | ||
7351bcaa JH |
1378 | /* Since we don't track liveness of virtual PHI nodes, it is possible that we |
1379 | rendered some PHI nodes unreachable while they are still in use. | |
1380 | Mark them for renaming. */ | |
1381 | if (cfg_altered) | |
1382 | { | |
b5b8b0ac AO |
1383 | basic_block prev_bb; |
1384 | ||
7351bcaa | 1385 | find_unreachable_blocks (); |
b5b8b0ac AO |
1386 | |
1387 | /* Delete all unreachable basic blocks in reverse dominator order. */ | |
1388 | for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb) | |
7351bcaa | 1389 | { |
b5b8b0ac AO |
1390 | prev_bb = bb->prev_bb; |
1391 | ||
a915ab00 JH |
1392 | if (!TEST_BIT (bb_contains_live_stmts, bb->index) |
1393 | || !(bb->flags & BB_REACHABLE)) | |
7351bcaa JH |
1394 | { |
1395 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1396 | if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi)))) | |
1397 | { | |
1398 | bool found = false; | |
1399 | imm_use_iterator iter; | |
1400 | ||
1401 | FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi))) | |
1402 | { | |
1403 | if (!(gimple_bb (stmt)->flags & BB_REACHABLE)) | |
1404 | continue; | |
1405 | if (gimple_code (stmt) == GIMPLE_PHI | |
1406 | || gimple_plf (stmt, STMT_NECESSARY)) | |
1407 | { | |
1408 | found = true; | |
1409 | BREAK_FROM_IMM_USE_STMT (iter); | |
1410 | } | |
1411 | } | |
1412 | if (found) | |
1413 | mark_virtual_phi_result_for_renaming (gsi_stmt (gsi)); | |
1414 | } | |
b5b8b0ac | 1415 | |
a915ab00 | 1416 | if (!(bb->flags & BB_REACHABLE)) |
b5b8b0ac AO |
1417 | { |
1418 | /* Speed up the removal of blocks that don't | |
1419 | dominate others. Walking backwards, this should | |
1420 | be the common case. ??? Do we need to recompute | |
1421 | dominators because of cfg_altered? */ | |
1422 | if (!MAY_HAVE_DEBUG_STMTS | |
1423 | || !first_dom_son (CDI_DOMINATORS, bb)) | |
1424 | delete_basic_block (bb); | |
1425 | else | |
1426 | { | |
1427 | h = get_all_dominated_blocks (CDI_DOMINATORS, bb); | |
1428 | ||
1429 | while (VEC_length (basic_block, h)) | |
1430 | { | |
1431 | bb = VEC_pop (basic_block, h); | |
1432 | prev_bb = bb->prev_bb; | |
1433 | /* Rearrangements to the CFG may have failed | |
1434 | to update the dominators tree, so that | |
1435 | formerly-dominated blocks are now | |
1436 | otherwise reachable. */ | |
1437 | if (!!(bb->flags & BB_REACHABLE)) | |
1438 | continue; | |
1439 | delete_basic_block (bb); | |
1440 | } | |
1441 | ||
1442 | VEC_free (basic_block, heap, h); | |
1443 | } | |
1444 | } | |
7351bcaa JH |
1445 | } |
1446 | } | |
1447 | } | |
5006671f RG |
1448 | FOR_EACH_BB (bb) |
1449 | { | |
1450 | /* Remove dead PHI nodes. */ | |
1451 | something_changed |= remove_dead_phis (bb); | |
1452 | } | |
1453 | ||
7665f023 | 1454 | return something_changed; |
9b3b55a1 DN |
1455 | } |
1456 | ||
1457 | ||
6de9cd9a DN |
1458 | /* Print out removed statement statistics. */ |
1459 | ||
1460 | static void | |
1461 | print_stats (void) | |
1462 | { | |
01902653 | 1463 | float percg; |
6de9cd9a | 1464 | |
01902653 RG |
1465 | percg = ((float) stats.removed / (float) stats.total) * 100; |
1466 | fprintf (dump_file, "Removed %d of %d statements (%d%%)\n", | |
1467 | stats.removed, stats.total, (int) percg); | |
6de9cd9a | 1468 | |
01902653 RG |
1469 | if (stats.total_phis == 0) |
1470 | percg = 0; | |
1471 | else | |
1472 | percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100; | |
6de9cd9a | 1473 | |
01902653 RG |
1474 | fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n", |
1475 | stats.removed_phis, stats.total_phis, (int) percg); | |
6de9cd9a | 1476 | } |
726a989a | 1477 | |
6de9cd9a DN |
1478 | /* Initialization for this pass. Set up the used data structures. */ |
1479 | ||
1480 | static void | |
1481 | tree_dce_init (bool aggressive) | |
1482 | { | |
1483 | memset ((void *) &stats, 0, sizeof (stats)); | |
1484 | ||
1485 | if (aggressive) | |
1486 | { | |
1487 | int i; | |
1488 | ||
e1111e8e | 1489 | control_dependence_map = XNEWVEC (bitmap, last_basic_block); |
6de9cd9a | 1490 | for (i = 0; i < last_basic_block; ++i) |
8bdbfff5 | 1491 | control_dependence_map[i] = BITMAP_ALLOC (NULL); |
6de9cd9a DN |
1492 | |
1493 | last_stmt_necessary = sbitmap_alloc (last_basic_block); | |
1494 | sbitmap_zero (last_stmt_necessary); | |
1fc41282 JH |
1495 | bb_contains_live_stmts = sbitmap_alloc (last_basic_block); |
1496 | sbitmap_zero (bb_contains_live_stmts); | |
6de9cd9a DN |
1497 | } |
1498 | ||
95a3742c | 1499 | processed = sbitmap_alloc (num_ssa_names + 1); |
6de9cd9a DN |
1500 | sbitmap_zero (processed); |
1501 | ||
726a989a | 1502 | worklist = VEC_alloc (gimple, heap, 64); |
9da4058c | 1503 | cfg_altered = false; |
6de9cd9a DN |
1504 | } |
1505 | ||
1506 | /* Cleanup after this pass. */ | |
1507 | ||
1508 | static void | |
1509 | tree_dce_done (bool aggressive) | |
1510 | { | |
1511 | if (aggressive) | |
1512 | { | |
1513 | int i; | |
1514 | ||
1515 | for (i = 0; i < last_basic_block; ++i) | |
8bdbfff5 | 1516 | BITMAP_FREE (control_dependence_map[i]); |
6de9cd9a DN |
1517 | free (control_dependence_map); |
1518 | ||
a28fee03 | 1519 | sbitmap_free (visited_control_parents); |
6de9cd9a | 1520 | sbitmap_free (last_stmt_necessary); |
1fc41282 JH |
1521 | sbitmap_free (bb_contains_live_stmts); |
1522 | bb_contains_live_stmts = NULL; | |
6de9cd9a DN |
1523 | } |
1524 | ||
1525 | sbitmap_free (processed); | |
906532aa | 1526 | |
726a989a | 1527 | VEC_free (gimple, heap, worklist); |
6de9cd9a | 1528 | } |
726a989a | 1529 | |
6de9cd9a DN |
1530 | /* Main routine to eliminate dead code. |
1531 | ||
1532 | AGGRESSIVE controls the aggressiveness of the algorithm. | |
1533 | In conservative mode, we ignore control dependence and simply declare | |
1534 | all but the most trivially dead branches necessary. This mode is fast. | |
1535 | In aggressive mode, control dependences are taken into account, which | |
1536 | results in more dead code elimination, but at the cost of some time. | |
1537 | ||
1538 | FIXME: Aggressive mode before PRE doesn't work currently because | |
1539 | the dominance info is not invalidated after DCE1. This is | |
1540 | not an issue right now because we only run aggressive DCE | |
1541 | as the last tree SSA pass, but keep this in mind when you | |
1542 | start experimenting with pass ordering. */ | |
1543 | ||
7665f023 | 1544 | static unsigned int |
6de9cd9a DN |
1545 | perform_tree_ssa_dce (bool aggressive) |
1546 | { | |
1547 | struct edge_list *el = NULL; | |
7665f023 | 1548 | bool something_changed = 0; |
6de9cd9a | 1549 | |
5ac60b56 RG |
1550 | calculate_dominance_info (CDI_DOMINATORS); |
1551 | ||
7351bcaa JH |
1552 | /* Preheaders are needed for SCEV to work. |
1553 | Simple lateches and recorded exits improve chances that loop will | |
1554 | proved to be finite in testcases such as in loop-15.c and loop-24.c */ | |
1555 | if (aggressive) | |
1556 | loop_optimizer_init (LOOPS_NORMAL | |
1557 | | LOOPS_HAVE_RECORDED_EXITS); | |
1558 | ||
6de9cd9a DN |
1559 | tree_dce_init (aggressive); |
1560 | ||
1561 | if (aggressive) | |
1562 | { | |
1563 | /* Compute control dependence. */ | |
1564 | timevar_push (TV_CONTROL_DEPENDENCES); | |
1565 | calculate_dominance_info (CDI_POST_DOMINATORS); | |
1566 | el = create_edge_list (); | |
1567 | find_all_control_dependences (el); | |
1568 | timevar_pop (TV_CONTROL_DEPENDENCES); | |
1569 | ||
a28fee03 SB |
1570 | visited_control_parents = sbitmap_alloc (last_basic_block); |
1571 | sbitmap_zero (visited_control_parents); | |
1572 | ||
6de9cd9a DN |
1573 | mark_dfs_back_edges (); |
1574 | } | |
1575 | ||
1576 | find_obviously_necessary_stmts (el); | |
1577 | ||
7351bcaa JH |
1578 | if (aggressive) |
1579 | loop_optimizer_finalize (); | |
1580 | ||
5006671f RG |
1581 | longest_chain = 0; |
1582 | total_chain = 0; | |
a61f9cc0 | 1583 | nr_walks = 0; |
5006671f | 1584 | chain_ovfl = false; |
d9b99b4c | 1585 | visited = BITMAP_ALLOC (NULL); |
6de9cd9a | 1586 | propagate_necessity (el); |
5006671f | 1587 | BITMAP_FREE (visited); |
bc300fec | 1588 | |
7665f023 JH |
1589 | something_changed |= eliminate_unnecessary_stmts (); |
1590 | something_changed |= cfg_altered; | |
6de9cd9a | 1591 | |
30251f7a ZD |
1592 | /* We do not update postdominators, so free them unconditionally. */ |
1593 | free_dominance_info (CDI_POST_DOMINATORS); | |
6de9cd9a | 1594 | |
9da4058c JL |
1595 | /* If we removed paths in the CFG, then we need to update |
1596 | dominators as well. I haven't investigated the possibility | |
1597 | of incrementally updating dominators. */ | |
1598 | if (cfg_altered) | |
1599 | free_dominance_info (CDI_DOMINATORS); | |
1600 | ||
01902653 RG |
1601 | statistics_counter_event (cfun, "Statements deleted", stats.removed); |
1602 | statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis); | |
1603 | ||
6de9cd9a | 1604 | /* Debugging dumps. */ |
01902653 | 1605 | if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS))) |
88a40e67 | 1606 | print_stats (); |
6de9cd9a DN |
1607 | |
1608 | tree_dce_done (aggressive); | |
960076d9 AP |
1609 | |
1610 | free_edge_list (el); | |
7665f023 JH |
1611 | |
1612 | if (something_changed) | |
b8698a0f | 1613 | return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect |
7665f023 JH |
1614 | | TODO_remove_unused_locals); |
1615 | else | |
1616 | return 0; | |
6de9cd9a DN |
1617 | } |
1618 | ||
1619 | /* Pass entry points. */ | |
c2924966 | 1620 | static unsigned int |
6de9cd9a DN |
1621 | tree_ssa_dce (void) |
1622 | { | |
7665f023 | 1623 | return perform_tree_ssa_dce (/*aggressive=*/false); |
6de9cd9a DN |
1624 | } |
1625 | ||
c2924966 | 1626 | static unsigned int |
49896738 RH |
1627 | tree_ssa_dce_loop (void) |
1628 | { | |
7665f023 JH |
1629 | unsigned int todo; |
1630 | todo = perform_tree_ssa_dce (/*aggressive=*/false); | |
1631 | if (todo) | |
1632 | { | |
1633 | free_numbers_of_iterations_estimates (); | |
1634 | scev_reset (); | |
1635 | } | |
1636 | return todo; | |
49896738 RH |
1637 | } |
1638 | ||
c2924966 | 1639 | static unsigned int |
6de9cd9a DN |
1640 | tree_ssa_cd_dce (void) |
1641 | { | |
7665f023 | 1642 | return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2); |
6de9cd9a DN |
1643 | } |
1644 | ||
1645 | static bool | |
1646 | gate_dce (void) | |
1647 | { | |
1648 | return flag_tree_dce != 0; | |
1649 | } | |
1650 | ||
8ddbbcae | 1651 | struct gimple_opt_pass pass_dce = |
6de9cd9a | 1652 | { |
8ddbbcae JH |
1653 | { |
1654 | GIMPLE_PASS, | |
6de9cd9a DN |
1655 | "dce", /* name */ |
1656 | gate_dce, /* gate */ | |
1657 | tree_ssa_dce, /* execute */ | |
1658 | NULL, /* sub */ | |
1659 | NULL, /* next */ | |
1660 | 0, /* static_pass_number */ | |
1661 | TV_TREE_DCE, /* tv_id */ | |
7faade0f | 1662 | PROP_cfg | PROP_ssa, /* properties_required */ |
6de9cd9a DN |
1663 | 0, /* properties_provided */ |
1664 | 0, /* properties_destroyed */ | |
1665 | 0, /* todo_flags_start */ | |
22c5fa5f | 1666 | TODO_verify_ssa /* todo_flags_finish */ |
8ddbbcae | 1667 | } |
6de9cd9a DN |
1668 | }; |
1669 | ||
8ddbbcae | 1670 | struct gimple_opt_pass pass_dce_loop = |
49896738 | 1671 | { |
8ddbbcae JH |
1672 | { |
1673 | GIMPLE_PASS, | |
49896738 RH |
1674 | "dceloop", /* name */ |
1675 | gate_dce, /* gate */ | |
1676 | tree_ssa_dce_loop, /* execute */ | |
1677 | NULL, /* sub */ | |
1678 | NULL, /* next */ | |
1679 | 0, /* static_pass_number */ | |
1680 | TV_TREE_DCE, /* tv_id */ | |
7faade0f | 1681 | PROP_cfg | PROP_ssa, /* properties_required */ |
49896738 RH |
1682 | 0, /* properties_provided */ |
1683 | 0, /* properties_destroyed */ | |
1684 | 0, /* todo_flags_start */ | |
22c5fa5f | 1685 | TODO_verify_ssa /* todo_flags_finish */ |
8ddbbcae | 1686 | } |
49896738 RH |
1687 | }; |
1688 | ||
8ddbbcae | 1689 | struct gimple_opt_pass pass_cd_dce = |
6de9cd9a | 1690 | { |
8ddbbcae JH |
1691 | { |
1692 | GIMPLE_PASS, | |
6de9cd9a DN |
1693 | "cddce", /* name */ |
1694 | gate_dce, /* gate */ | |
1695 | tree_ssa_cd_dce, /* execute */ | |
1696 | NULL, /* sub */ | |
1697 | NULL, /* next */ | |
1698 | 0, /* static_pass_number */ | |
1699 | TV_TREE_CD_DCE, /* tv_id */ | |
7faade0f | 1700 | PROP_cfg | PROP_ssa, /* properties_required */ |
6de9cd9a DN |
1701 | 0, /* properties_provided */ |
1702 | 0, /* properties_destroyed */ | |
1703 | 0, /* todo_flags_start */ | |
22c5fa5f | 1704 | TODO_verify_ssa |
8ddbbcae JH |
1705 | | TODO_verify_flow /* todo_flags_finish */ |
1706 | } | |
6de9cd9a | 1707 | }; |