]>
Commit | Line | Data |
---|---|---|
/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 | |
23 | /* Dead code elimination. | |
24 | ||
25 | References: | |
26 | ||
27 | Building an Optimizing Compiler, | |
28 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
29 | ||
30 | Advanced Compiler Design and Implementation, | |
31 | Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10. | |
32 | ||
33 | Dead-code elimination is the removal of statements which have no | |
34 | impact on the program's output. "Dead statements" have no impact | |
35 | on the program's output, while "necessary statements" may have | |
36 | impact on the output. | |
37 | ||
38 | The algorithm consists of three phases: | |
39 | 1. Marking as necessary all statements known to be necessary, | |
40 | e.g. most function calls, writing a value to memory, etc; | |
41 | 2. Propagating necessary statements, e.g., the statements | |
42 | giving values to operands in necessary statements; and | |
43 | 3. Removing dead statements. */ | |
44 | ||
45 | #include "config.h" | |
46 | #include "system.h" | |
47 | #include "coretypes.h" | |
c7131fb2 | 48 | #include "backend.h" |
957060b5 | 49 | #include "rtl.h" |
6de9cd9a | 50 | #include "tree.h" |
c7131fb2 | 51 | #include "gimple.h" |
957060b5 AM |
52 | #include "cfghooks.h" |
53 | #include "tree-pass.h" | |
c7131fb2 | 54 | #include "ssa.h" |
957060b5 | 55 | #include "gimple-pretty-print.h" |
40e23961 | 56 | #include "fold-const.h" |
d8a2d370 | 57 | #include "calls.h" |
60393bbc | 58 | #include "cfganal.h" |
2fb9a547 | 59 | #include "tree-eh.h" |
45b0be94 | 60 | #include "gimplify.h" |
5be5c238 | 61 | #include "gimple-iterator.h" |
442b4905 | 62 | #include "tree-cfg.h" |
e28030cf | 63 | #include "tree-ssa-loop-niter.h" |
442b4905 AM |
64 | #include "tree-into-ssa.h" |
65 | #include "tree-dfa.h" | |
49896738 | 66 | #include "cfgloop.h" |
a4176272 | 67 | #include "tree-scalar-evolution.h" |
1304953e JJ |
68 | #include "tree-ssa-propagate.h" |
69 | #include "gimple-fold.h" | |
726a989a | 70 | |
6de9cd9a DN |
/* Statistics gathered by the pass for the dump file: how many statements
   and PHI nodes were seen and how many of them were removed.  */
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

/* Per-statement pass-local flag recording that a statement has been
   marked necessary (i.e. must be kept).  */
#define STMT_NECESSARY GF_PLF_1

/* Worklist of statements whose necessity still has to be propagated
   to the statements feeding their operands.  */
static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL holds map from basic block index into the postorder.  */
static int *bb_postorder;
116 | ||
9b3b55a1 | 117 | |
6de9cd9a DN |
118 | /* If STMT is not already marked necessary, mark it, and add it to the |
119 | worklist if ADD_TO_WORKLIST is true. */ | |
ad06ee51 | 120 | |
6de9cd9a | 121 | static inline void |
355fe088 | 122 | mark_stmt_necessary (gimple *stmt, bool add_to_worklist) |
6de9cd9a | 123 | { |
1e128c5f | 124 | gcc_assert (stmt); |
6de9cd9a | 125 | |
726a989a | 126 | if (gimple_plf (stmt, STMT_NECESSARY)) |
6de9cd9a DN |
127 | return; |
128 | ||
129 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
130 | { | |
131 | fprintf (dump_file, "Marking useful stmt: "); | |
726a989a | 132 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
133 | fprintf (dump_file, "\n"); |
134 | } | |
135 | ||
726a989a | 136 | gimple_set_plf (stmt, STMT_NECESSARY, true); |
6de9cd9a | 137 | if (add_to_worklist) |
9771b263 | 138 | worklist.safe_push (stmt); |
e4dbb0d4 | 139 | if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt)) |
d7c028c0 | 140 | bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index); |
6de9cd9a DN |
141 | } |
142 | ||
38635499 DN |
143 | |
144 | /* Mark the statement defining operand OP as necessary. */ | |
6de9cd9a DN |
145 | |
146 | static inline void | |
38635499 | 147 | mark_operand_necessary (tree op) |
6de9cd9a | 148 | { |
355fe088 | 149 | gimple *stmt; |
6de9cd9a DN |
150 | int ver; |
151 | ||
1e128c5f | 152 | gcc_assert (op); |
6de9cd9a DN |
153 | |
154 | ver = SSA_NAME_VERSION (op); | |
d7c028c0 | 155 | if (bitmap_bit_p (processed, ver)) |
5006671f RG |
156 | { |
157 | stmt = SSA_NAME_DEF_STMT (op); | |
158 | gcc_assert (gimple_nop_p (stmt) | |
159 | || gimple_plf (stmt, STMT_NECESSARY)); | |
160 | return; | |
161 | } | |
d7c028c0 | 162 | bitmap_set_bit (processed, ver); |
6de9cd9a DN |
163 | |
164 | stmt = SSA_NAME_DEF_STMT (op); | |
1e128c5f | 165 | gcc_assert (stmt); |
6de9cd9a | 166 | |
726a989a | 167 | if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt)) |
6de9cd9a DN |
168 | return; |
169 | ||
5006671f RG |
170 | if (dump_file && (dump_flags & TDF_DETAILS)) |
171 | { | |
172 | fprintf (dump_file, "marking necessary through "); | |
ef6cb4c7 | 173 | print_generic_expr (dump_file, op); |
5006671f | 174 | fprintf (dump_file, " stmt "); |
ef6cb4c7 | 175 | print_gimple_stmt (dump_file, stmt, 0); |
5006671f RG |
176 | } |
177 | ||
726a989a | 178 | gimple_set_plf (stmt, STMT_NECESSARY, true); |
1fc41282 | 179 | if (bb_contains_live_stmts) |
d7c028c0 | 180 | bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index); |
9771b263 | 181 | worklist.safe_push (stmt); |
6de9cd9a | 182 | } |
9b3b55a1 | 183 | |
6de9cd9a | 184 | |
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      /* Kept, but they never make anything else necessary, so do not
	 put them on the worklist.  */
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	/* Allocation builtins are not inherently necessary: if their
	   result is unused the call can be deleted (together with a
	   matching free — see the BUILT_IN_FREE handling in
	   propagate_necessity).  */
	if (callee != NULL_TREE
	    && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    CASE_BUILT_IN_ALLOCA:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls that
	   produce no result and have no side effects (i.e. const pure
	   functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	/* No side effects and no used result: nothing makes this call
	   necessary.  */
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 would could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
	  || !gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      /* Simple gotos are lowered away before this pass; only computed
	 gotos remain and those must be kept.  */
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      /* In conservative mode every control statement is kept; in
	 aggressive mode control dependence decides later.  */
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      /* Clobbers are never necessary by themselves.  */
      if (gimple_clobber_p (stmt))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Stores visible outside the function must be preserved.  */
  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
9b3b55a1 DN |
305 | |
306 | ||
ad06ee51 EB |
307 | /* Mark the last statement of BB as necessary. */ |
308 | ||
309 | static void | |
310 | mark_last_stmt_necessary (basic_block bb) | |
311 | { | |
355fe088 | 312 | gimple *stmt = last_stmt (bb); |
ad06ee51 | 313 | |
d7c028c0 LC |
314 | bitmap_set_bit (last_stmt_necessary, bb->index); |
315 | bitmap_set_bit (bb_contains_live_stmts, bb->index); | |
ad06ee51 EB |
316 | |
317 | /* We actually mark the statement only if it is a control statement. */ | |
318 | if (stmt && is_ctrl_stmt (stmt)) | |
319 | mark_stmt_necessary (stmt, true); | |
320 | } | |
321 | ||
322 | ||
323 | /* Mark control dependent edges of BB as necessary. We have to do this only | |
324 | once for each basic block so we set the appropriate bit after we're done. | |
325 | ||
326 | When IGNORE_SELF is true, ignore BB in the list of control dependences. */ | |
b1a0b3b4 | 327 | |
9b3b55a1 | 328 | static void |
c8e9d8c3 | 329 | mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self) |
9b3b55a1 DN |
330 | { |
331 | bitmap_iterator bi; | |
332 | unsigned edge_number; | |
b1a0b3b4 | 333 | bool skipped = false; |
9b3b55a1 | 334 | |
fefa31b5 | 335 | gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)); |
9b3b55a1 | 336 | |
fefa31b5 | 337 | if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
9b3b55a1 DN |
338 | return; |
339 | ||
c8e9d8c3 RB |
340 | EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index), |
341 | 0, edge_number, bi) | |
9b3b55a1 | 342 | { |
30fd2977 | 343 | basic_block cd_bb = cd->get_edge_src (edge_number); |
9b3b55a1 | 344 | |
b1a0b3b4 JH |
345 | if (ignore_self && cd_bb == bb) |
346 | { | |
347 | skipped = true; | |
348 | continue; | |
349 | } | |
350 | ||
d7c028c0 | 351 | if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index)) |
ad06ee51 | 352 | mark_last_stmt_necessary (cd_bb); |
9b3b55a1 | 353 | } |
ad06ee51 | 354 | |
b1a0b3b4 | 355 | if (!skipped) |
d7c028c0 | 356 | bitmap_set_bit (visited_control_parents, bb->index); |
9b3b55a1 DN |
357 | } |
358 | ||
359 | ||
6de9cd9a DN |
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence analysis will decide which
   control statements to keep.  Also prevents possibly infinite loops
   from being removed in aggressive mode.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      /* Irreducible regions are not discovered as loops; keep their
	 back edges alive explicitly.  */
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      /* Keep the controlling statements of loops whose finiteness we
	 cannot prove.  */
      FOR_EACH_LOOP (loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
    }
}
6de9cd9a | 428 | |
6de9cd9a | 429 | |
5006671f RG |
430 | /* Return true if REF is based on an aliased base, otherwise false. */ |
431 | ||
432 | static bool | |
433 | ref_may_be_aliased (tree ref) | |
434 | { | |
b3421a06 | 435 | gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR); |
5006671f RG |
436 | while (handled_component_p (ref)) |
437 | ref = TREE_OPERAND (ref, 0); | |
70f34814 RG |
438 | if (TREE_CODE (ref) == MEM_REF |
439 | && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR) | |
440 | ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0); | |
5006671f RG |
441 | return !(DECL_P (ref) |
442 | && !may_be_aliased (ref)); | |
443 | } | |
444 | ||
5006671f RG |
/* Bitmap shared between walks in mark_all_reaching_defs_necessary to
   avoid visiting the same vdefs repeatedly (and thus quadratic
   behavior); lazily allocated by the walker.  */
static bitmap visited = NULL;
/* Statistics over the vdef walks performed so far; presumably consulted
   elsewhere in the pass to decide when to fall back to the simple
   (chain_ovfl) mode — confirm against propagate_necessity's caller.  */
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
/* TRUE once walking individual def chains was abandoned; then all
   immediately reachable definitions are marked necessary instead.  */
static bool chain_ovfl = false;
450 | ||
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary (clobbers excepted).  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == refd->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly: the store must cover the whole of
	     the load's extent.  */
	  if (known_eq (size, max_size)
	      && known_subrange_p (ref->offset, ref->max_size, offset, size))
	    return true;
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
514 | ||
515 | static void | |
355fe088 | 516 | mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref) |
5006671f | 517 | { |
5006671f | 518 | unsigned int chain; |
42bc61e0 | 519 | ao_ref refd; |
5006671f | 520 | gcc_assert (!chain_ovfl); |
42bc61e0 RG |
521 | ao_ref_init (&refd, ref); |
522 | chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt), | |
5006671f | 523 | mark_aliased_reaching_defs_necessary_1, |
1415abc0 | 524 | gimple_bb (stmt), NULL); |
5006671f RG |
525 | if (chain > longest_chain) |
526 | longest_chain = chain; | |
527 | total_chain += chain; | |
a61f9cc0 | 528 | nr_walks++; |
5006671f RG |
529 | } |
530 | ||
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition: allocation and free builtins only change
     the heap, not any user-visible memory we track here.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  CASE_BUILT_IN_ALLOCA:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  /* Everything else is a may-def we must keep (clobbers excepted).  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}
587 | ||
/* Mark all may-definitions reaching the VUSE of STMT necessary, using
   mark_all_reaching_defs_necessary_1 to skip defs based on non-aliased
   decls.  The global VISITED bitmap is shared between calls so repeated
   walks do not revisit the same vdefs (avoids quadratic behavior).  */

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
594 | ||
7351bcaa JH |
595 | /* Return true for PHI nodes with one or identical arguments |
596 | can be removed. */ | |
597 | static bool | |
355fe088 | 598 | degenerate_phi_p (gimple *phi) |
7351bcaa JH |
599 | { |
600 | unsigned int i; | |
601 | tree op = gimple_phi_arg_def (phi, 0); | |
602 | for (i = 1; i < gimple_phi_num_args (phi); i++) | |
603 | if (gimple_phi_arg_def (phi, i) != op) | |
604 | return false; | |
605 | return true; | |
606 | } | |
607 | ||
9b3b55a1 DN |
608 | /* Propagate necessity using the operands of necessary statements. |
609 | Process the uses on each statement in the worklist, and add all | |
610 | feeding statements which contribute to the calculation of this | |
b8698a0f | 611 | value to the worklist. |
6de9cd9a DN |
612 | |
613 | In conservative mode, EL is NULL. */ | |
614 | ||
615 | static void | |
c8e9d8c3 | 616 | propagate_necessity (bool aggressive) |
6de9cd9a | 617 | { |
355fe088 | 618 | gimple *stmt; |
6de9cd9a DN |
619 | |
620 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
621 | fprintf (dump_file, "\nProcessing worklist:\n"); | |
622 | ||
9771b263 | 623 | while (worklist.length () > 0) |
6de9cd9a | 624 | { |
9b3b55a1 | 625 | /* Take STMT from worklist. */ |
9771b263 | 626 | stmt = worklist.pop (); |
6de9cd9a DN |
627 | |
628 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
629 | { | |
630 | fprintf (dump_file, "processing: "); | |
726a989a | 631 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
632 | fprintf (dump_file, "\n"); |
633 | } | |
634 | ||
635 | if (aggressive) | |
636 | { | |
ad06ee51 EB |
637 | /* Mark the last statement of the basic blocks on which the block |
638 | containing STMT is control dependent, but only if we haven't | |
6de9cd9a | 639 | already done so. */ |
726a989a | 640 | basic_block bb = gimple_bb (stmt); |
fefa31b5 | 641 | if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) |
d7c028c0 | 642 | && !bitmap_bit_p (visited_control_parents, bb->index)) |
c8e9d8c3 | 643 | mark_control_dependent_edges_necessary (bb, false); |
6de9cd9a DN |
644 | } |
645 | ||
5006671f RG |
646 | if (gimple_code (stmt) == GIMPLE_PHI |
647 | /* We do not process virtual PHI nodes nor do we track their | |
648 | necessity. */ | |
ea057359 | 649 | && !virtual_operand_p (gimple_phi_result (stmt))) |
6de9cd9a DN |
650 | { |
651 | /* PHI nodes are somewhat special in that each PHI alternative has | |
652 | data and control dependencies. All the statements feeding the | |
653 | PHI node's arguments are always necessary. In aggressive mode, | |
654 | we also consider the control dependent edges leading to the | |
655 | predecessor block associated with each PHI alternative as | |
656 | necessary. */ | |
538dd0b7 | 657 | gphi *phi = as_a <gphi *> (stmt); |
726a989a | 658 | size_t k; |
9b3b55a1 | 659 | |
726a989a | 660 | for (k = 0; k < gimple_phi_num_args (stmt); k++) |
6de9cd9a | 661 | { |
9b3b55a1 | 662 | tree arg = PHI_ARG_DEF (stmt, k); |
6de9cd9a | 663 | if (TREE_CODE (arg) == SSA_NAME) |
38635499 | 664 | mark_operand_necessary (arg); |
6de9cd9a DN |
665 | } |
666 | ||
b1a0b3b4 JH |
667 | /* For PHI operands it matters from where the control flow arrives |
668 | to the BB. Consider the following example: | |
669 | ||
670 | a=exp1; | |
671 | b=exp2; | |
672 | if (test) | |
673 | ; | |
674 | else | |
675 | ; | |
676 | c=PHI(a,b) | |
677 | ||
678 | We need to mark control dependence of the empty basic blocks, since they | |
679 | contains computation of PHI operands. | |
680 | ||
681 | Doing so is too restrictive in the case the predecestor block is in | |
682 | the loop. Consider: | |
683 | ||
684 | if (b) | |
685 | { | |
686 | int i; | |
687 | for (i = 0; i<1000; ++i) | |
688 | ; | |
689 | j = 0; | |
690 | } | |
691 | return j; | |
692 | ||
693 | There is PHI for J in the BB containing return statement. | |
694 | In this case the control dependence of predecestor block (that is | |
695 | within the empty loop) also contains the block determining number | |
696 | of iterations of the block that would prevent removing of empty | |
697 | loop in this case. | |
698 | ||
699 | This scenario can be avoided by splitting critical edges. | |
700 | To save the critical edge splitting pass we identify how the control | |
701 | dependence would look like if the edge was split. | |
702 | ||
703 | Consider the modified CFG created from current CFG by splitting | |
704 | edge B->C. In the postdominance tree of modified CFG, C' is | |
705 | always child of C. There are two cases how chlids of C' can look | |
706 | like: | |
707 | ||
708 | 1) C' is leaf | |
709 | ||
710 | In this case the only basic block C' is control dependent on is B. | |
711 | ||
712 | 2) C' has single child that is B | |
713 | ||
714 | In this case control dependence of C' is same as control | |
715 | dependence of B in original CFG except for block B itself. | |
716 | (since C' postdominate B in modified CFG) | |
717 | ||
718 | Now how to decide what case happens? There are two basic options: | |
719 | ||
720 | a) C postdominate B. Then C immediately postdominate B and | |
721 | case 2 happens iff there is no other way from B to C except | |
722 | the edge B->C. | |
723 | ||
724 | There is other way from B to C iff there is succesor of B that | |
725 | is not postdominated by B. Testing this condition is somewhat | |
726 | expensive, because we need to iterate all succesors of B. | |
727 | We are safe to assume that this does not happen: we will mark B | |
728 | as needed when processing the other path from B to C that is | |
729 | conrol dependent on B and marking control dependencies of B | |
730 | itself is harmless because they will be processed anyway after | |
731 | processing control statement in B. | |
732 | ||
733 | b) C does not postdominate B. Always case 1 happens since there is | |
734 | path from C to exit that does not go through B and thus also C'. */ | |
735 | ||
7351bcaa | 736 | if (aggressive && !degenerate_phi_p (stmt)) |
6de9cd9a | 737 | { |
726a989a | 738 | for (k = 0; k < gimple_phi_num_args (stmt); k++) |
6de9cd9a | 739 | { |
538dd0b7 | 740 | basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src; |
b1a0b3b4 JH |
741 | |
742 | if (gimple_bb (stmt) | |
743 | != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb)) | |
6de9cd9a | 744 | { |
d7c028c0 | 745 | if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index)) |
ad06ee51 | 746 | mark_last_stmt_necessary (arg_bb); |
6de9cd9a | 747 | } |
fefa31b5 | 748 | else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) |
d7c028c0 | 749 | && !bitmap_bit_p (visited_control_parents, |
ad06ee51 | 750 | arg_bb->index)) |
c8e9d8c3 | 751 | mark_control_dependent_edges_necessary (arg_bb, true); |
6de9cd9a DN |
752 | } |
753 | } | |
754 | } | |
755 | else | |
756 | { | |
757 | /* Propagate through the operands. Examine all the USE, VUSE and | |
b8698a0f | 758 | VDEF operands in this statement. Mark all the statements |
5006671f | 759 | which feed this statement's uses as necessary. */ |
38635499 DN |
760 | ssa_op_iter iter; |
761 | tree use; | |
4c124b4c | 762 | |
996e1de5 RG |
763 | /* If this is a call to free which is directly fed by an |
764 | allocation function do not mark that necessary through | |
765 | processing the argument. */ | |
766 | if (gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
767 | { | |
768 | tree ptr = gimple_call_arg (stmt, 0); | |
355fe088 | 769 | gimple *def_stmt; |
996e1de5 RG |
770 | tree def_callee; |
771 | /* If the pointer we free is defined by an allocation | |
772 | function do not add the call to the worklist. */ | |
773 | if (TREE_CODE (ptr) == SSA_NAME | |
774 | && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr)) | |
775 | && (def_callee = gimple_call_fndecl (def_stmt)) | |
776 | && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL | |
22869a37 RB |
777 | && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC |
778 | || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC | |
c02d2d0a | 779 | || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC)) |
31db0fe0 | 780 | continue; |
996e1de5 RG |
781 | } |
782 | ||
5006671f | 783 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE) |
38635499 | 784 | mark_operand_necessary (use); |
5006671f RG |
785 | |
786 | use = gimple_vuse (stmt); | |
787 | if (!use) | |
788 | continue; | |
789 | ||
790 | /* If we dropped to simple mode make all immediately | |
791 | reachable definitions necessary. */ | |
792 | if (chain_ovfl) | |
793 | { | |
794 | mark_all_reaching_defs_necessary (stmt); | |
795 | continue; | |
796 | } | |
797 | ||
798 | /* For statements that may load from memory (have a VUSE) we | |
799 | have to mark all reaching (may-)definitions as necessary. | |
800 | We partition this task into two cases: | |
801 | 1) explicit loads based on decls that are not aliased | |
802 | 2) implicit loads (like calls) and explicit loads not | |
803 | based on decls that are not aliased (like indirect | |
804 | references or loads from globals) | |
805 | For 1) we mark all reaching may-defs as necessary, stopping | |
806 | at dominating kills. For 2) we want to mark all dominating | |
807 | references necessary, but non-aliased ones which we handle | |
67635176 RG |
808 | in 1). By keeping a global visited bitmap for references |
809 | we walk for 2) we avoid quadratic behavior for those. */ | |
5006671f RG |
810 | |
811 | if (is_gimple_call (stmt)) | |
812 | { | |
14c41b9b | 813 | tree callee = gimple_call_fndecl (stmt); |
5006671f RG |
814 | unsigned i; |
815 | ||
14c41b9b RG |
816 | /* Calls to functions that are merely acting as barriers |
817 | or that only store to memory do not make any previous | |
818 | stores necessary. */ | |
819 | if (callee != NULL_TREE | |
820 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
821 | && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET | |
36dc1a88 | 822 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK |
14c41b9b | 823 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC |
22869a37 | 824 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC |
36dc1a88 | 825 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC |
ab4472fa | 826 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE |
df2f6100 | 827 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END |
9e878cf1 | 828 | || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee)) |
ab4472fa | 829 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE |
45d439ac JJ |
830 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE |
831 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED)) | |
14c41b9b RG |
832 | continue; |
833 | ||
5006671f | 834 | /* Calls implicitly load from memory, their arguments |
67635176 | 835 | in addition may explicitly perform memory loads. */ |
5006671f RG |
836 | mark_all_reaching_defs_necessary (stmt); |
837 | for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
838 | { | |
839 | tree arg = gimple_call_arg (stmt, i); | |
840 | if (TREE_CODE (arg) == SSA_NAME | |
841 | || is_gimple_min_invariant (arg)) | |
842 | continue; | |
b3421a06 TV |
843 | if (TREE_CODE (arg) == WITH_SIZE_EXPR) |
844 | arg = TREE_OPERAND (arg, 0); | |
5006671f RG |
845 | if (!ref_may_be_aliased (arg)) |
846 | mark_aliased_reaching_defs_necessary (stmt, arg); | |
847 | } | |
848 | } | |
849 | else if (gimple_assign_single_p (stmt)) | |
850 | { | |
67635176 | 851 | tree rhs; |
5006671f RG |
852 | /* If this is a load mark things necessary. */ |
853 | rhs = gimple_assign_rhs1 (stmt); | |
854 | if (TREE_CODE (rhs) != SSA_NAME | |
47598145 MM |
855 | && !is_gimple_min_invariant (rhs) |
856 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
5006671f RG |
857 | { |
858 | if (!ref_may_be_aliased (rhs)) | |
859 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
860 | else | |
47598145 | 861 | mark_all_reaching_defs_necessary (stmt); |
5006671f | 862 | } |
5006671f | 863 | } |
538dd0b7 | 864 | else if (greturn *return_stmt = dyn_cast <greturn *> (stmt)) |
5006671f | 865 | { |
538dd0b7 | 866 | tree rhs = gimple_return_retval (return_stmt); |
5006671f | 867 | /* A return statement may perform a load. */ |
e106efc7 RG |
868 | if (rhs |
869 | && TREE_CODE (rhs) != SSA_NAME | |
47598145 MM |
870 | && !is_gimple_min_invariant (rhs) |
871 | && TREE_CODE (rhs) != CONSTRUCTOR) | |
5006671f RG |
872 | { |
873 | if (!ref_may_be_aliased (rhs)) | |
874 | mark_aliased_reaching_defs_necessary (stmt, rhs); | |
875 | else | |
876 | mark_all_reaching_defs_necessary (stmt); | |
877 | } | |
878 | } | |
538dd0b7 | 879 | else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt)) |
5006671f RG |
880 | { |
881 | unsigned i; | |
882 | mark_all_reaching_defs_necessary (stmt); | |
883 | /* Inputs may perform loads. */ | |
538dd0b7 | 884 | for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i) |
5006671f | 885 | { |
538dd0b7 | 886 | tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i)); |
5006671f RG |
887 | if (TREE_CODE (op) != SSA_NAME |
888 | && !is_gimple_min_invariant (op) | |
47598145 | 889 | && TREE_CODE (op) != CONSTRUCTOR |
5006671f RG |
890 | && !ref_may_be_aliased (op)) |
891 | mark_aliased_reaching_defs_necessary (stmt, op); | |
892 | } | |
893 | } | |
1d4fb493 RH |
894 | else if (gimple_code (stmt) == GIMPLE_TRANSACTION) |
895 | { | |
896 | /* The beginning of a transaction is a memory barrier. */ | |
897 | /* ??? If we were really cool, we'd only be a barrier | |
898 | for the memories touched within the transaction. */ | |
899 | mark_all_reaching_defs_necessary (stmt); | |
900 | } | |
5006671f RG |
901 | else |
902 | gcc_unreachable (); | |
903 | ||
904 | /* If we over-used our alias oracle budget drop to simple | |
a61f9cc0 RG |
905 | mode. The cost metric allows quadratic behavior |
906 | (number of uses times number of may-defs queries) up to | |
907 | a constant maximal number of queries and after that falls back to | |
5006671f | 908 | super-linear complexity. */ |
a61f9cc0 RG |
909 | if (/* Constant but quadratic for small functions. */ |
910 | total_chain > 128 * 128 | |
911 | /* Linear in the number of may-defs. */ | |
912 | && total_chain > 32 * longest_chain | |
913 | /* Linear in the number of uses. */ | |
914 | && total_chain > nr_walks * 32) | |
5006671f RG |
915 | { |
916 | chain_ovfl = true; | |
917 | if (visited) | |
918 | bitmap_clear (visited); | |
919 | } | |
6de9cd9a DN |
920 | } |
921 | } | |
922 | } | |
52328bf6 | 923 | |
6de9cd9a DN |
924 | /* Remove dead PHI nodes from block BB. */ |
925 | ||
7665f023 | 926 | static bool |
6de9cd9a DN |
927 | remove_dead_phis (basic_block bb) |
928 | { | |
7665f023 | 929 | bool something_changed = false; |
538dd0b7 DM |
930 | gphi *phi; |
931 | gphi_iterator gsi; | |
6de9cd9a | 932 | |
355a7673 | 933 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);) |
6de9cd9a DN |
934 | { |
935 | stats.total_phis++; | |
538dd0b7 | 936 | phi = gsi.phi (); |
6de9cd9a | 937 | |
5006671f RG |
938 | /* We do not track necessity of virtual PHI nodes. Instead do |
939 | very simple dead PHI removal here. */ | |
ea057359 | 940 | if (virtual_operand_p (gimple_phi_result (phi))) |
5006671f | 941 | { |
5006671f RG |
942 | /* Virtual PHI nodes with one or identical arguments |
943 | can be removed. */ | |
7351bcaa | 944 | if (degenerate_phi_p (phi)) |
5006671f RG |
945 | { |
946 | tree vdef = gimple_phi_result (phi); | |
7351bcaa JH |
947 | tree vuse = gimple_phi_arg_def (phi, 0); |
948 | ||
5006671f RG |
949 | use_operand_p use_p; |
950 | imm_use_iterator iter; | |
355fe088 | 951 | gimple *use_stmt; |
5006671f RG |
952 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef) |
953 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
954 | SET_USE (use_p, vuse); | |
eab09a51 JH |
955 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef) |
956 | && TREE_CODE (vuse) == SSA_NAME) | |
5006671f RG |
957 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1; |
958 | } | |
959 | else | |
960 | gimple_set_plf (phi, STMT_NECESSARY, true); | |
961 | } | |
962 | ||
726a989a | 963 | if (!gimple_plf (phi, STMT_NECESSARY)) |
6de9cd9a | 964 | { |
7665f023 | 965 | something_changed = true; |
6de9cd9a DN |
966 | if (dump_file && (dump_flags & TDF_DETAILS)) |
967 | { | |
968 | fprintf (dump_file, "Deleting : "); | |
726a989a | 969 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); |
6de9cd9a DN |
970 | fprintf (dump_file, "\n"); |
971 | } | |
972 | ||
726a989a | 973 | remove_phi_node (&gsi, true); |
6de9cd9a | 974 | stats.removed_phis++; |
5006671f | 975 | continue; |
6de9cd9a | 976 | } |
5006671f RG |
977 | |
978 | gsi_next (&gsi); | |
6de9cd9a | 979 | } |
7665f023 | 980 | return something_changed; |
6de9cd9a | 981 | } |
9b3b55a1 DN |
982 | |
983 | ||
206048bd | 984 | /* Remove dead statement pointed to by iterator I. Receives the basic block BB |
6de9cd9a DN |
985 | containing I so that we don't have to look it up. */ |
986 | ||
987 | static void | |
726a989a | 988 | remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb) |
6de9cd9a | 989 | { |
355fe088 | 990 | gimple *stmt = gsi_stmt (*i); |
6de9cd9a DN |
991 | |
992 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
993 | { | |
994 | fprintf (dump_file, "Deleting : "); | |
726a989a | 995 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
6de9cd9a DN |
996 | fprintf (dump_file, "\n"); |
997 | } | |
998 | ||
999 | stats.removed++; | |
1000 | ||
1001 | /* If we have determined that a conditional branch statement contributes | |
e4dbb0d4 JH |
1002 | nothing to the program, then we not only remove it, but we need to update |
1003 | the CFG. We can chose any of edges out of BB as long as we are sure to not | |
1004 | close infinite loops. This is done by always choosing the edge closer to | |
1005 | exit in inverted_post_order_compute order. */ | |
726a989a | 1006 | if (is_ctrl_stmt (stmt)) |
6de9cd9a | 1007 | { |
1fc41282 | 1008 | edge_iterator ei; |
e4dbb0d4 JH |
1009 | edge e = NULL, e2; |
1010 | ||
1011 | /* See if there is only one non-abnormal edge. */ | |
1012 | if (single_succ_p (bb)) | |
1013 | e = single_succ_edge (bb); | |
1014 | /* Otherwise chose one that is closer to bb with live statement in it. | |
1015 | To be able to chose one, we compute inverted post order starting from | |
1016 | all BBs with live statements. */ | |
1017 | if (!e) | |
1018 | { | |
1019 | if (!bb_postorder) | |
1020 | { | |
6fa95e09 TS |
1021 | auto_vec<int, 20> postorder; |
1022 | inverted_post_order_compute (&postorder, | |
1023 | &bb_contains_live_stmts); | |
e4dbb0d4 | 1024 | bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
6fa95e09 | 1025 | for (unsigned int i = 0; i < postorder.length (); ++i) |
e4dbb0d4 | 1026 | bb_postorder[postorder[i]] = i; |
e4dbb0d4 JH |
1027 | } |
1028 | FOR_EACH_EDGE (e2, ei, bb->succs) | |
1029 | if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun) | |
1030 | || bb_postorder [e->dest->index] | |
1031 | < bb_postorder [e2->dest->index]) | |
1032 | e = e2; | |
1033 | } | |
1fc41282 | 1034 | gcc_assert (e); |
357067f2 | 1035 | e->probability = profile_probability::always (); |
6de9cd9a DN |
1036 | |
1037 | /* The edge is no longer associated with a conditional, so it does | |
e4dbb0d4 JH |
1038 | not have TRUE/FALSE flags. |
1039 | We are also safe to drop EH/ABNORMAL flags and turn them into | |
1040 | normal control flow, because we know that all the destinations (including | |
1041 | those odd edges) are equivalent for program execution. */ | |
1042 | e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL); | |
6de9cd9a | 1043 | |
0a180c0e | 1044 | /* The lone outgoing edge from BB will be a fallthru edge. */ |
1fc41282 JH |
1045 | e->flags |= EDGE_FALLTHRU; |
1046 | ||
1047 | /* Remove the remaining outgoing edges. */ | |
1048 | for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); ) | |
1049 | if (e != e2) | |
1050 | { | |
1051 | cfg_altered = true; | |
b7e36a33 MP |
1052 | /* If we made a BB unconditionally exit a loop or removed |
1053 | an entry into an irreducible region, then this transform | |
1054 | alters the set of BBs in the loop. Schedule a fixup. */ | |
1055 | if (loop_exit_edge_p (bb->loop_father, e) | |
1056 | || (e2->dest->flags & BB_IRREDUCIBLE_LOOP)) | |
7e37fe6f RB |
1057 | loops_state_set (LOOPS_NEED_FIXUP); |
1058 | remove_edge (e2); | |
1fc41282 JH |
1059 | } |
1060 | else | |
1061 | ei_next (&ei); | |
ec8c1492 JJ |
1062 | } |
1063 | ||
1064 | /* If this is a store into a variable that is being optimized away, | |
1065 | add a debug bind stmt if possible. */ | |
36f52e8f | 1066 | if (MAY_HAVE_DEBUG_BIND_STMTS |
ec8c1492 JJ |
1067 | && gimple_assign_single_p (stmt) |
1068 | && is_gimple_val (gimple_assign_rhs1 (stmt))) | |
1069 | { | |
1070 | tree lhs = gimple_assign_lhs (stmt); | |
8813a647 | 1071 | if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL) |
ec8c1492 JJ |
1072 | && !DECL_IGNORED_P (lhs) |
1073 | && is_gimple_reg_type (TREE_TYPE (lhs)) | |
1074 | && !is_global_var (lhs) | |
1075 | && !DECL_HAS_VALUE_EXPR_P (lhs)) | |
1076 | { | |
1077 | tree rhs = gimple_assign_rhs1 (stmt); | |
538dd0b7 | 1078 | gdebug *note |
ec8c1492 JJ |
1079 | = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt); |
1080 | gsi_insert_after (i, note, GSI_SAME_STMT); | |
1081 | } | |
6de9cd9a | 1082 | } |
5006671f RG |
1083 | |
1084 | unlink_stmt_vdef (stmt); | |
b8698a0f L |
1085 | gsi_remove (i, true); |
1086 | release_defs (stmt); | |
6de9cd9a | 1087 | } |
9b3b55a1 | 1088 | |
1304953e JJ |
1089 | /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any |
1090 | uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */ | |
1091 | ||
1092 | static tree | |
1093 | find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data) | |
1094 | { | |
1095 | if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR) | |
1096 | *walk_subtrees = 0; | |
1097 | if (*tp == (tree) data) | |
1098 | return *tp; | |
1099 | return NULL_TREE; | |
1100 | } | |
1101 | ||
1102 | /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used, | |
1103 | but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls | |
1104 | into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug | |
1105 | uses. */ | |
1106 | ||
1107 | static void | |
1108 | maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi, | |
1109 | enum tree_code subcode) | |
1110 | { | |
355fe088 | 1111 | gimple *stmt = gsi_stmt (*gsi); |
1304953e JJ |
1112 | tree lhs = gimple_call_lhs (stmt); |
1113 | ||
1114 | if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME) | |
1115 | return; | |
1116 | ||
1117 | imm_use_iterator imm_iter; | |
1118 | use_operand_p use_p; | |
1119 | bool has_debug_uses = false; | |
1120 | bool has_realpart_uses = false; | |
1121 | bool has_other_uses = false; | |
1122 | FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs) | |
1123 | { | |
355fe088 | 1124 | gimple *use_stmt = USE_STMT (use_p); |
1304953e JJ |
1125 | if (is_gimple_debug (use_stmt)) |
1126 | has_debug_uses = true; | |
1127 | else if (is_gimple_assign (use_stmt) | |
1128 | && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR | |
1129 | && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs) | |
1130 | has_realpart_uses = true; | |
1131 | else | |
1132 | { | |
1133 | has_other_uses = true; | |
1134 | break; | |
1135 | } | |
1136 | } | |
1137 | ||
1138 | if (!has_realpart_uses || has_other_uses) | |
1139 | return; | |
1140 | ||
1141 | tree arg0 = gimple_call_arg (stmt, 0); | |
1142 | tree arg1 = gimple_call_arg (stmt, 1); | |
1143 | location_t loc = gimple_location (stmt); | |
1144 | tree type = TREE_TYPE (TREE_TYPE (lhs)); | |
1145 | tree utype = type; | |
1146 | if (!TYPE_UNSIGNED (type)) | |
1147 | utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1); | |
1148 | tree result = fold_build2_loc (loc, subcode, utype, | |
1149 | fold_convert_loc (loc, utype, arg0), | |
1150 | fold_convert_loc (loc, utype, arg1)); | |
1151 | result = fold_convert_loc (loc, type, result); | |
1152 | ||
1153 | if (has_debug_uses) | |
1154 | { | |
355fe088 | 1155 | gimple *use_stmt; |
1304953e JJ |
1156 | FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs) |
1157 | { | |
1158 | if (!gimple_debug_bind_p (use_stmt)) | |
1159 | continue; | |
1160 | tree v = gimple_debug_bind_get_value (use_stmt); | |
1161 | if (walk_tree (&v, find_non_realpart_uses, lhs, NULL)) | |
1162 | { | |
1163 | gimple_debug_bind_reset_value (use_stmt); | |
1164 | update_stmt (use_stmt); | |
1165 | } | |
1166 | } | |
1167 | } | |
1168 | ||
1169 | if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result)) | |
1170 | result = drop_tree_overflow (result); | |
1171 | tree overflow = build_zero_cst (type); | |
1172 | tree ctype = build_complex_type (type); | |
1173 | if (TREE_CODE (result) == INTEGER_CST) | |
1174 | result = build_complex (ctype, result, overflow); | |
1175 | else | |
1176 | result = build2_loc (gimple_location (stmt), COMPLEX_EXPR, | |
1177 | ctype, result, overflow); | |
1178 | ||
1179 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1180 | { | |
1181 | fprintf (dump_file, "Transforming call: "); | |
1182 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
1183 | fprintf (dump_file, "because the overflow result is never used into: "); | |
1184 | print_generic_stmt (dump_file, result, TDF_SLIM); | |
1185 | fprintf (dump_file, "\n"); | |
1186 | } | |
1187 | ||
1188 | if (!update_call_from_tree (gsi, result)) | |
1189 | gimplify_and_update_call_from_tree (gsi, result); | |
1190 | } | |
1191 | ||
9b3b55a1 DN |
1192 | /* Eliminate unnecessary statements. Any instruction not marked as necessary |
1193 | contributes nothing to the program, and can be deleted. */ | |
1194 | ||
7665f023 | 1195 | static bool |
9b3b55a1 DN |
1196 | eliminate_unnecessary_stmts (void) |
1197 | { | |
7665f023 | 1198 | bool something_changed = false; |
9b3b55a1 | 1199 | basic_block bb; |
0ca5af51 | 1200 | gimple_stmt_iterator gsi, psi; |
355fe088 | 1201 | gimple *stmt; |
726a989a | 1202 | tree call; |
9771b263 | 1203 | vec<basic_block> h; |
9b3b55a1 DN |
1204 | |
1205 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1206 | fprintf (dump_file, "\nEliminating unnecessary statements:\n"); | |
1207 | ||
1208 | clear_special_calls (); | |
9b3b55a1 | 1209 | |
b5b8b0ac AO |
1210 | /* Walking basic blocks and statements in reverse order avoids |
1211 | releasing SSA names before any other DEFs that refer to them are | |
1212 | released. This helps avoid loss of debug information, as we get | |
1213 | a chance to propagate all RHSs of removed SSAs into debug uses, | |
1214 | rather than only the latest ones. E.g., consider: | |
1215 | ||
1216 | x_3 = y_1 + z_2; | |
1217 | a_5 = x_3 - b_4; | |
1218 | # DEBUG a => a_5 | |
1219 | ||
1220 | If we were to release x_3 before a_5, when we reached a_5 and | |
1221 | tried to substitute it into the debug stmt, we'd see x_3 there, | |
1222 | but x_3's DEF, type, etc would have already been disconnected. | |
1223 | By going backwards, the debug stmt first changes to: | |
1224 | ||
1225 | # DEBUG a => x_3 - b_4 | |
1226 | ||
1227 | and then to: | |
1228 | ||
1229 | # DEBUG a => y_1 + z_2 - b_4 | |
1230 | ||
1231 | as desired. */ | |
1232 | gcc_assert (dom_info_available_p (CDI_DOMINATORS)); | |
fefa31b5 DM |
1233 | h = get_all_dominated_blocks (CDI_DOMINATORS, |
1234 | single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))); | |
b5b8b0ac | 1235 | |
9771b263 | 1236 | while (h.length ()) |
9b3b55a1 | 1237 | { |
9771b263 | 1238 | bb = h.pop (); |
b5b8b0ac | 1239 | |
9b3b55a1 | 1240 | /* Remove dead statements. */ |
0ca5af51 | 1241 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi) |
9b3b55a1 | 1242 | { |
726a989a | 1243 | stmt = gsi_stmt (gsi); |
9b3b55a1 | 1244 | |
0ca5af51 AO |
1245 | psi = gsi; |
1246 | gsi_prev (&psi); | |
1247 | ||
9b3b55a1 DN |
1248 | stats.total++; |
1249 | ||
996e1de5 | 1250 | /* We can mark a call to free as not necessary if the |
c54e2022 RB |
1251 | defining statement of its argument is not necessary |
1252 | (and thus is getting removed). */ | |
1253 | if (gimple_plf (stmt, STMT_NECESSARY) | |
1254 | && gimple_call_builtin_p (stmt, BUILT_IN_FREE)) | |
996e1de5 RG |
1255 | { |
1256 | tree ptr = gimple_call_arg (stmt, 0); | |
c54e2022 RB |
1257 | if (TREE_CODE (ptr) == SSA_NAME) |
1258 | { | |
355fe088 | 1259 | gimple *def_stmt = SSA_NAME_DEF_STMT (ptr); |
b54fc5b7 RB |
1260 | if (!gimple_nop_p (def_stmt) |
1261 | && !gimple_plf (def_stmt, STMT_NECESSARY)) | |
c54e2022 RB |
1262 | gimple_set_plf (stmt, STMT_NECESSARY, false); |
1263 | } | |
996e1de5 RG |
1264 | } |
1265 | ||
726a989a RB |
1266 | /* If GSI is not necessary then remove it. */ |
1267 | if (!gimple_plf (stmt, STMT_NECESSARY)) | |
7665f023 | 1268 | { |
12822768 RB |
1269 | /* Keep clobbers that we can keep live live. */ |
1270 | if (gimple_clobber_p (stmt)) | |
1271 | { | |
1272 | ssa_op_iter iter; | |
1273 | use_operand_p use_p; | |
1274 | bool dead = false; | |
1275 | FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) | |
1276 | { | |
1277 | tree name = USE_FROM_PTR (use_p); | |
1278 | if (!SSA_NAME_IS_DEFAULT_DEF (name) | |
1279 | && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))) | |
1280 | { | |
1281 | dead = true; | |
1282 | break; | |
1283 | } | |
1284 | } | |
1285 | if (!dead) | |
1286 | continue; | |
1287 | } | |
717f4048 AO |
1288 | if (!is_gimple_debug (stmt)) |
1289 | something_changed = true; | |
726a989a | 1290 | remove_dead_stmt (&gsi, bb); |
7665f023 | 1291 | } |
726a989a | 1292 | else if (is_gimple_call (stmt)) |
9b3b55a1 | 1293 | { |
f1749ec1 RG |
1294 | tree name = gimple_call_lhs (stmt); |
1295 | ||
538dd0b7 | 1296 | notice_special_calls (as_a <gcall *> (stmt)); |
f1749ec1 RG |
1297 | |
1298 | /* When LHS of var = call (); is dead, simplify it into | |
1299 | call (); saving one operand. */ | |
1300 | if (name | |
1301 | && TREE_CODE (name) == SSA_NAME | |
d7c028c0 | 1302 | && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)) |
f1749ec1 RG |
1303 | /* Avoid doing so for allocation calls which we |
1304 | did not mark as necessary, it will confuse the | |
1305 | special logic we apply to malloc/free pair removal. */ | |
1306 | && (!(call = gimple_call_fndecl (stmt)) | |
1307 | || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL | |
22869a37 RB |
1308 | || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC |
1309 | && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC | |
f1749ec1 | 1310 | && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC |
9e878cf1 | 1311 | && !ALLOCA_FUNCTION_CODE_P |
31db0fe0 | 1312 | (DECL_FUNCTION_CODE (call))))) |
cf227303 | 1313 | { |
f1749ec1 RG |
1314 | something_changed = true; |
1315 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
cf227303 | 1316 | { |
f1749ec1 RG |
1317 | fprintf (dump_file, "Deleting LHS of call: "); |
1318 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
1319 | fprintf (dump_file, "\n"); | |
cf227303 | 1320 | } |
f1749ec1 RG |
1321 | |
1322 | gimple_call_set_lhs (stmt, NULL_TREE); | |
1323 | maybe_clean_or_replace_eh_stmt (stmt, stmt); | |
1324 | update_stmt (stmt); | |
1325 | release_ssa_name (name); | |
a601baa0 | 1326 | |
f6b9f2ff ML |
1327 | /* GOMP_SIMD_LANE or ASAN_POISON without lhs is not |
1328 | needed. */ | |
1329 | if (gimple_call_internal_p (stmt)) | |
1330 | switch (gimple_call_internal_fn (stmt)) | |
1331 | { | |
1332 | case IFN_GOMP_SIMD_LANE: | |
1333 | case IFN_ASAN_POISON: | |
1334 | remove_dead_stmt (&gsi, bb); | |
1335 | break; | |
1336 | default: | |
1337 | break; | |
1338 | } | |
cf227303 | 1339 | } |
1304953e JJ |
1340 | else if (gimple_call_internal_p (stmt)) |
1341 | switch (gimple_call_internal_fn (stmt)) | |
1342 | { | |
1343 | case IFN_ADD_OVERFLOW: | |
1344 | maybe_optimize_arith_overflow (&gsi, PLUS_EXPR); | |
1345 | break; | |
1346 | case IFN_SUB_OVERFLOW: | |
1347 | maybe_optimize_arith_overflow (&gsi, MINUS_EXPR); | |
1348 | break; | |
1349 | case IFN_MUL_OVERFLOW: | |
1350 | maybe_optimize_arith_overflow (&gsi, MULT_EXPR); | |
1351 | break; | |
1352 | default: | |
1353 | break; | |
1354 | } | |
726a989a | 1355 | } |
9b3b55a1 DN |
1356 | } |
1357 | } | |
b5b8b0ac | 1358 | |
9771b263 | 1359 | h.release (); |
b5b8b0ac | 1360 | |
7351bcaa JH |
1361 | /* Since we don't track liveness of virtual PHI nodes, it is possible that we |
1362 | rendered some PHI nodes unreachable while they are still in use. | |
1363 | Mark them for renaming. */ | |
1364 | if (cfg_altered) | |
1365 | { | |
b5b8b0ac AO |
1366 | basic_block prev_bb; |
1367 | ||
7351bcaa | 1368 | find_unreachable_blocks (); |
b5b8b0ac AO |
1369 | |
1370 | /* Delete all unreachable basic blocks in reverse dominator order. */ | |
fefa31b5 DM |
1371 | for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb; |
1372 | bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb) | |
7351bcaa | 1373 | { |
b5b8b0ac AO |
1374 | prev_bb = bb->prev_bb; |
1375 | ||
d7c028c0 | 1376 | if (!bitmap_bit_p (bb_contains_live_stmts, bb->index) |
a915ab00 | 1377 | || !(bb->flags & BB_REACHABLE)) |
7351bcaa | 1378 | { |
538dd0b7 DM |
1379 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); |
1380 | gsi_next (&gsi)) | |
1381 | if (virtual_operand_p (gimple_phi_result (gsi.phi ()))) | |
7351bcaa JH |
1382 | { |
1383 | bool found = false; | |
1384 | imm_use_iterator iter; | |
1385 | ||
538dd0b7 DM |
1386 | FOR_EACH_IMM_USE_STMT (stmt, iter, |
1387 | gimple_phi_result (gsi.phi ())) | |
7351bcaa JH |
1388 | { |
1389 | if (!(gimple_bb (stmt)->flags & BB_REACHABLE)) | |
1390 | continue; | |
1391 | if (gimple_code (stmt) == GIMPLE_PHI | |
1392 | || gimple_plf (stmt, STMT_NECESSARY)) | |
1393 | { | |
1394 | found = true; | |
1395 | BREAK_FROM_IMM_USE_STMT (iter); | |
1396 | } | |
1397 | } | |
1398 | if (found) | |
538dd0b7 | 1399 | mark_virtual_phi_result_for_renaming (gsi.phi ()); |
7351bcaa | 1400 | } |
b5b8b0ac | 1401 | |
a915ab00 | 1402 | if (!(bb->flags & BB_REACHABLE)) |
b5b8b0ac AO |
1403 | { |
1404 | /* Speed up the removal of blocks that don't | |
1405 | dominate others. Walking backwards, this should | |
1406 | be the common case. ??? Do we need to recompute | |
1407 | dominators because of cfg_altered? */ | |
65f4b875 | 1408 | if (!first_dom_son (CDI_DOMINATORS, bb)) |
b5b8b0ac AO |
1409 | delete_basic_block (bb); |
1410 | else | |
1411 | { | |
1412 | h = get_all_dominated_blocks (CDI_DOMINATORS, bb); | |
1413 | ||
9771b263 | 1414 | while (h.length ()) |
b5b8b0ac | 1415 | { |
9771b263 | 1416 | bb = h.pop (); |
b5b8b0ac AO |
1417 | prev_bb = bb->prev_bb; |
1418 | /* Rearrangements to the CFG may have failed | |
1419 | to update the dominators tree, so that | |
1420 | formerly-dominated blocks are now | |
1421 | otherwise reachable. */ | |
1422 | if (!!(bb->flags & BB_REACHABLE)) | |
1423 | continue; | |
1424 | delete_basic_block (bb); | |
1425 | } | |
1426 | ||
9771b263 | 1427 | h.release (); |
b5b8b0ac AO |
1428 | } |
1429 | } | |
7351bcaa JH |
1430 | } |
1431 | } | |
1432 | } | |
11cd3bed | 1433 | FOR_EACH_BB_FN (bb, cfun) |
5006671f RG |
1434 | { |
1435 | /* Remove dead PHI nodes. */ | |
1436 | something_changed |= remove_dead_phis (bb); | |
1437 | } | |
1438 | ||
e4dbb0d4 JH |
1439 | if (bb_postorder) |
1440 | free (bb_postorder); | |
1441 | bb_postorder = NULL; | |
1442 | ||
7665f023 | 1443 | return something_changed; |
9b3b55a1 DN |
1444 | } |
1445 | ||
1446 | ||
6de9cd9a DN |
1447 | /* Print out removed statement statistics. */ |
1448 | ||
1449 | static void | |
1450 | print_stats (void) | |
1451 | { | |
01902653 | 1452 | float percg; |
6de9cd9a | 1453 | |
01902653 RG |
1454 | percg = ((float) stats.removed / (float) stats.total) * 100; |
1455 | fprintf (dump_file, "Removed %d of %d statements (%d%%)\n", | |
1456 | stats.removed, stats.total, (int) percg); | |
6de9cd9a | 1457 | |
01902653 RG |
1458 | if (stats.total_phis == 0) |
1459 | percg = 0; | |
1460 | else | |
1461 | percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100; | |
6de9cd9a | 1462 | |
01902653 RG |
1463 | fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n", |
1464 | stats.removed_phis, stats.total_phis, (int) percg); | |
6de9cd9a | 1465 | } |
726a989a | 1466 | |
6de9cd9a DN |
1467 | /* Initialization for this pass. Set up the used data structures. */ |
1468 | ||
1469 | static void | |
1470 | tree_dce_init (bool aggressive) | |
1471 | { | |
1472 | memset ((void *) &stats, 0, sizeof (stats)); | |
1473 | ||
1474 | if (aggressive) | |
1475 | { | |
8b1c6fd7 | 1476 | last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun)); |
f61e445a | 1477 | bitmap_clear (last_stmt_necessary); |
8b1c6fd7 | 1478 | bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun)); |
f61e445a | 1479 | bitmap_clear (bb_contains_live_stmts); |
6de9cd9a DN |
1480 | } |
1481 | ||
95a3742c | 1482 | processed = sbitmap_alloc (num_ssa_names + 1); |
f61e445a | 1483 | bitmap_clear (processed); |
6de9cd9a | 1484 | |
9771b263 | 1485 | worklist.create (64); |
9da4058c | 1486 | cfg_altered = false; |
6de9cd9a DN |
1487 | } |
1488 | ||
1489 | /* Cleanup after this pass. */ | |
1490 | ||
1491 | static void | |
1492 | tree_dce_done (bool aggressive) | |
1493 | { | |
1494 | if (aggressive) | |
1495 | { | |
c8e9d8c3 | 1496 | delete cd; |
a28fee03 | 1497 | sbitmap_free (visited_control_parents); |
6de9cd9a | 1498 | sbitmap_free (last_stmt_necessary); |
1fc41282 JH |
1499 | sbitmap_free (bb_contains_live_stmts); |
1500 | bb_contains_live_stmts = NULL; | |
6de9cd9a DN |
1501 | } |
1502 | ||
1503 | sbitmap_free (processed); | |
906532aa | 1504 | |
9771b263 | 1505 | worklist.release (); |
6de9cd9a | 1506 | } |
726a989a | 1507 | |
6de9cd9a DN |
1508 | /* Main routine to eliminate dead code. |
1509 | ||
1510 | AGGRESSIVE controls the aggressiveness of the algorithm. | |
1511 | In conservative mode, we ignore control dependence and simply declare | |
1512 | all but the most trivially dead branches necessary. This mode is fast. | |
1513 | In aggressive mode, control dependences are taken into account, which | |
1514 | results in more dead code elimination, but at the cost of some time. | |
1515 | ||
1516 | FIXME: Aggressive mode before PRE doesn't work currently because | |
1517 | the dominance info is not invalidated after DCE1. This is | |
1518 | not an issue right now because we only run aggressive DCE | |
1519 | as the last tree SSA pass, but keep this in mind when you | |
1520 | start experimenting with pass ordering. */ | |
1521 | ||
7665f023 | 1522 | static unsigned int |
6de9cd9a DN |
1523 | perform_tree_ssa_dce (bool aggressive) |
1524 | { | |
7665f023 | 1525 | bool something_changed = 0; |
6de9cd9a | 1526 | |
5ac60b56 RG |
1527 | calculate_dominance_info (CDI_DOMINATORS); |
1528 | ||
7351bcaa JH |
1529 | /* Preheaders are needed for SCEV to work. |
1530 | Simple lateches and recorded exits improve chances that loop will | |
1531 | proved to be finite in testcases such as in loop-15.c and loop-24.c */ | |
59ec925b RB |
1532 | bool in_loop_pipeline = scev_initialized_p (); |
1533 | if (aggressive && ! in_loop_pipeline) | |
1534 | { | |
1535 | scev_initialize (); | |
1536 | loop_optimizer_init (LOOPS_NORMAL | |
1537 | | LOOPS_HAVE_RECORDED_EXITS); | |
1538 | } | |
7351bcaa | 1539 | |
6de9cd9a DN |
1540 | tree_dce_init (aggressive); |
1541 | ||
1542 | if (aggressive) | |
1543 | { | |
1544 | /* Compute control dependence. */ | |
6de9cd9a | 1545 | calculate_dominance_info (CDI_POST_DOMINATORS); |
30fd2977 | 1546 | cd = new control_dependences (); |
6de9cd9a | 1547 | |
8b1c6fd7 DM |
1548 | visited_control_parents = |
1549 | sbitmap_alloc (last_basic_block_for_fn (cfun)); | |
f61e445a | 1550 | bitmap_clear (visited_control_parents); |
a28fee03 | 1551 | |
6de9cd9a DN |
1552 | mark_dfs_back_edges (); |
1553 | } | |
1554 | ||
c8e9d8c3 | 1555 | find_obviously_necessary_stmts (aggressive); |
6de9cd9a | 1556 | |
59ec925b RB |
1557 | if (aggressive && ! in_loop_pipeline) |
1558 | { | |
1559 | loop_optimizer_finalize (); | |
1560 | scev_finalize (); | |
1561 | } | |
7351bcaa | 1562 | |
5006671f RG |
1563 | longest_chain = 0; |
1564 | total_chain = 0; | |
a61f9cc0 | 1565 | nr_walks = 0; |
5006671f | 1566 | chain_ovfl = false; |
d9b99b4c | 1567 | visited = BITMAP_ALLOC (NULL); |
c8e9d8c3 | 1568 | propagate_necessity (aggressive); |
5006671f | 1569 | BITMAP_FREE (visited); |
bc300fec | 1570 | |
7665f023 JH |
1571 | something_changed |= eliminate_unnecessary_stmts (); |
1572 | something_changed |= cfg_altered; | |
6de9cd9a | 1573 | |
30251f7a ZD |
1574 | /* We do not update postdominators, so free them unconditionally. */ |
1575 | free_dominance_info (CDI_POST_DOMINATORS); | |
6de9cd9a | 1576 | |
9da4058c JL |
1577 | /* If we removed paths in the CFG, then we need to update |
1578 | dominators as well. I haven't investigated the possibility | |
1579 | of incrementally updating dominators. */ | |
1580 | if (cfg_altered) | |
1581 | free_dominance_info (CDI_DOMINATORS); | |
1582 | ||
01902653 RG |
1583 | statistics_counter_event (cfun, "Statements deleted", stats.removed); |
1584 | statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis); | |
1585 | ||
6de9cd9a | 1586 | /* Debugging dumps. */ |
01902653 | 1587 | if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS))) |
88a40e67 | 1588 | print_stats (); |
6de9cd9a DN |
1589 | |
1590 | tree_dce_done (aggressive); | |
960076d9 | 1591 | |
7665f023 | 1592 | if (something_changed) |
30866dc9 | 1593 | { |
61183076 | 1594 | free_numbers_of_iterations_estimates (cfun); |
59ec925b | 1595 | if (in_loop_pipeline) |
30866dc9 RB |
1596 | scev_reset (); |
1597 | return TODO_update_ssa | TODO_cleanup_cfg; | |
1598 | } | |
c0e50f72 | 1599 | return 0; |
6de9cd9a DN |
1600 | } |
1601 | ||
/* Pass entry points.  */

/* Entry point of the plain DCE pass: never uses control dependences
   (non-aggressive mode).  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}
1608 | ||
/* Entry point of the control-dependence DCE pass: aggressive mode is
   enabled only at -O2 and above.  */
static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}
1614 | ||
namespace {

/* Pass descriptor for the non-aggressive "dce" pass.  Requires a CFG
   in SSA form; schedules no TODOs itself (perform_tree_ssa_dce returns
   them dynamically when something changed).  */
const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass is cloneable so it can appear multiple times in the
     pass pipeline.  */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  /* Gated on -ftree-dce.  */
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

/* Factory function used by the pass manager to instantiate the
   "dce" pass.  */
gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
1651 | ||
namespace {

/* Pass descriptor for the control-dependence "cddce" pass.  Identical
   to "dce" except for the name and the timing variable.  */
const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass is cloneable so it can appear multiple times in the
     pass pipeline.  */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  /* Gated on -ftree-dce; aggressiveness is decided at execute time
     from the optimization level.  */
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

/* Factory function used by the pass manager to instantiate the
   "cddce" pass.  */
gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}
23ffbafe BC |
1688 | |
1689 | ||
1690 | /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and | |
1691 | is consumed by this function. The function has linear complexity in | |
1692 | the number of dead stmts with a constant factor like the average SSA | |
1693 | use operands number. */ | |
1694 | ||
1695 | void | |
1696 | simple_dce_from_worklist (bitmap worklist) | |
1697 | { | |
1698 | while (! bitmap_empty_p (worklist)) | |
1699 | { | |
1700 | /* Pop item. */ | |
1701 | unsigned i = bitmap_first_set_bit (worklist); | |
1702 | bitmap_clear_bit (worklist, i); | |
1703 | ||
1704 | tree def = ssa_name (i); | |
1705 | /* Removed by somebody else or still in use. */ | |
1706 | if (! def || ! has_zero_uses (def)) | |
1707 | continue; | |
1708 | ||
1709 | gimple *t = SSA_NAME_DEF_STMT (def); | |
1710 | if (gimple_has_side_effects (t)) | |
1711 | continue; | |
1712 | ||
1713 | /* Add uses to the worklist. */ | |
1714 | ssa_op_iter iter; | |
1715 | use_operand_p use_p; | |
1716 | FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE) | |
1717 | { | |
1718 | tree use = USE_FROM_PTR (use_p); | |
1719 | if (TREE_CODE (use) == SSA_NAME | |
1720 | && ! SSA_NAME_IS_DEFAULT_DEF (use)) | |
1721 | bitmap_set_bit (worklist, SSA_NAME_VERSION (use)); | |
1722 | } | |
1723 | ||
1724 | /* Remove stmt. */ | |
1725 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1726 | { | |
1727 | fprintf (dump_file, "Removing dead stmt:"); | |
1728 | print_gimple_stmt (dump_file, t, 0); | |
1729 | } | |
1730 | gimple_stmt_iterator gsi = gsi_for_stmt (t); | |
1731 | if (gimple_code (t) == GIMPLE_PHI) | |
1732 | remove_phi_node (&gsi, true); | |
1733 | else | |
1734 | { | |
1735 | gsi_remove (&gsi, true); | |
1736 | release_defs (t); | |
1737 | } | |
1738 | } | |
1739 | } |