]>
Commit | Line | Data |
---|---|---|
750628d8 | 1 | /* Generic SSA value propagation engine. |
66647d44 | 2 | Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. |
750628d8 DN |
3 | Contributed by Diego Novillo <dnovillo@redhat.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it | |
8 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 9 | Free Software Foundation; either version 3, or (at your option) any |
750628d8 DN |
10 | later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
750628d8 DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "flags.h" | |
27 | #include "rtl.h" | |
28 | #include "tm_p.h" | |
29 | #include "ggc.h" | |
30 | #include "basic-block.h" | |
31 | #include "output.h" | |
750628d8 DN |
32 | #include "expr.h" |
33 | #include "function.h" | |
34 | #include "diagnostic.h" | |
35 | #include "timevar.h" | |
36 | #include "tree-dump.h" | |
37 | #include "tree-flow.h" | |
38 | #include "tree-pass.h" | |
39 | #include "tree-ssa-propagate.h" | |
40 | #include "langhooks.h" | |
78492bf5 SB |
41 | #include "varray.h" |
42 | #include "vec.h" | |
b608a1bc | 43 | #include "value-prof.h" |
726a989a | 44 | #include "gimple.h" |
750628d8 DN |
45 | |
46 | /* This file implements a generic value propagation engine based on | |
47 | the same propagation used by the SSA-CCP algorithm [1]. | |
48 | ||
49 | Propagation is performed by simulating the execution of every | |
50 | statement that produces the value being propagated. Simulation | |
51 | proceeds as follows: | |
52 | ||
53 | 1- Initially, all edges of the CFG are marked not executable and | |
766ff1b1 | 54 | the CFG worklist is seeded with all the statements in the entry |
750628d8 DN |
55 | basic block (block 0). |
56 | ||
57 | 2- Every statement S is simulated with a call to the call-back | |
58 | function SSA_PROP_VISIT_STMT. This evaluation may produce 3 | |
59 | results: | |
60 | ||
61 | SSA_PROP_NOT_INTERESTING: Statement S produces nothing of | |
62 | interest and does not affect any of the work lists. | |
63 | ||
64 | SSA_PROP_VARYING: The value produced by S cannot be determined | |
65 | at compile time. Further simulation of S is not required. | |
66 | If S is a conditional jump, all the outgoing edges for the | |
67 | block are considered executable and added to the work | |
68 | list. | |
69 | ||
70 | SSA_PROP_INTERESTING: S produces a value that can be computed | |
71 | at compile time. Its result can be propagated into the | |
2a7e31df | 72 | statements that feed from S. Furthermore, if S is a |
750628d8 DN |
73 | conditional jump, only the edge known to be taken is added |
74 | to the work list. Edges that are known not to execute are | |
75 | never simulated. | |
76 | ||
77 | 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The | |
78 | return value from SSA_PROP_VISIT_PHI has the same semantics as | |
766ff1b1 | 79 | described in #2. |
750628d8 DN |
80 | |
81 | 4- Three work lists are kept. Statements are only added to these | |
82 | lists if they produce one of SSA_PROP_INTERESTING or | |
83 | SSA_PROP_VARYING. | |
84 | ||
85 | CFG_BLOCKS contains the list of blocks to be simulated. | |
86 | Blocks are added to this list if their incoming edges are | |
87 | found executable. | |
88 | ||
89 | VARYING_SSA_EDGES contains the list of statements that feed | |
90 | from statements that produce an SSA_PROP_VARYING result. | |
91 | These are simulated first to speed up processing. | |
92 | ||
93 | INTERESTING_SSA_EDGES contains the list of statements that | |
94 | feed from statements that produce an SSA_PROP_INTERESTING | |
95 | result. | |
96 | ||
97 | 5- Simulation terminates when all three work lists are drained. | |
98 | ||
99 | Before calling ssa_propagate, it is important to clear | |
726a989a | 100 | prop_simulate_again_p for all the statements in the program that |
750628d8 DN |
101 | should be simulated. This initialization allows an implementation |
102 | to specify which statements should never be simulated. | |
103 | ||
104 | It is also important to compute def-use information before calling | |
105 | ssa_propagate. | |
106 | ||
107 | References: | |
108 | ||
109 | [1] Constant propagation with conditional branches, | |
110 | Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | |
111 | ||
112 | [2] Building an Optimizing Compiler, | |
113 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
114 | ||
115 | [3] Advanced Compiler Design and Implementation, | |
116 | Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */ | |
117 | ||
118 | /* Function pointers used to parameterize the propagation engine. */ | |
119 | static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt; | |
120 | static ssa_prop_visit_phi_fn ssa_prop_visit_phi; | |
121 | ||
726a989a RB |
122 | /* Keep track of statements that have been added to one of the SSA |
123 | edges worklists. This flag is used to avoid visiting statements | |
124 | unnecessarily when draining an SSA edge worklist. If while | |
125 | simulating a basic block, we find a statement with | |
750628d8 | 126 | STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge |
726a989a RB |
127 | processing from visiting it again. |
128 | ||
129 | NOTE: users of the propagation engine are not allowed to use | |
130 | the GF_PLF_1 flag. */ | |
131 | #define STMT_IN_SSA_EDGE_WORKLIST GF_PLF_1 | |
750628d8 DN |
132 | |
133 | /* A bitmap to keep track of executable blocks in the CFG. */ | |
134 | static sbitmap executable_blocks; | |
135 | ||
136 | /* Array of control flow edges on the worklist. */ | |
36c88f34 | 137 | static VEC(basic_block,heap) *cfg_blocks; |
750628d8 DN |
138 | |
139 | static unsigned int cfg_blocks_num = 0; | |
140 | static int cfg_blocks_tail; | |
141 | static int cfg_blocks_head; | |
142 | ||
143 | static sbitmap bb_in_list; | |
144 | ||
145 | /* Worklist of SSA edges which will need reexamination as their | |
146 | definition has changed. SSA edges are def-use edges in the SSA | |
147 | web. For each D-U edge, we store the target statement or PHI node | |
148 | U. */ | |
726a989a | 149 | static GTY(()) VEC(gimple,gc) *interesting_ssa_edges; |
750628d8 DN |
150 | |
151 | /* Identical to INTERESTING_SSA_EDGES. For performance reasons, the | |
152 | list of SSA edges is split into two. One contains all SSA edges | |
153 | who need to be reexamined because their lattice value changed to | |
154 | varying (this worklist), and the other contains all other SSA edges | |
155 | to be reexamined (INTERESTING_SSA_EDGES). | |
156 | ||
157 | Since most values in the program are VARYING, the ideal situation | |
158 | is to move them to that lattice value as quickly as possible. | |
159 | Thus, it doesn't make sense to process any other type of lattice | |
160 | value until all VARYING values are propagated fully, which is one | |
161 | thing using the VARYING worklist achieves. In addition, if we | |
162 | don't use a separate worklist for VARYING edges, we end up with | |
163 | situations where lattice values move from | |
164 | UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */ | |
726a989a | 165 | static GTY(()) VEC(gimple,gc) *varying_ssa_edges; |
750628d8 DN |
166 | |
167 | ||
168 | /* Return true if the block worklist empty. */ | |
169 | ||
170 | static inline bool | |
171 | cfg_blocks_empty_p (void) | |
172 | { | |
173 | return (cfg_blocks_num == 0); | |
174 | } | |
175 | ||
176 | ||
/* Add a basic block to the worklist.  The block must not be already
   in the worklist, and it must not be the ENTRY or EXIT block.

   The worklist is a circular queue stored in CFG_BLOCKS, delimited by
   CFG_BLOCKS_HEAD and CFG_BLOCKS_TAIL and counted by CFG_BLOCKS_NUM.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
  gcc_assert (!TEST_BIT (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VEC_length (basic_block, cfg_blocks))
        {
          /* We have to grow the array now.  Adjust the queue to occupy
             the full space of the original array.  We do not need to
             initialize the newly allocated portion of the array
             because we keep track of CFG_BLOCKS_HEAD and
             CFG_BLOCKS_TAIL.  */
          cfg_blocks_tail = VEC_length (basic_block, cfg_blocks);
          cfg_blocks_head = 0;
          VEC_safe_grow (basic_block, heap, cfg_blocks, 2 * cfg_blocks_tail);
        }
      /* Minor optimization: we prefer to see blocks with more
         predecessors later, because there is more of a chance that
         the incoming edges will be executable.  Blocks with at least
         as many predecessors as the current head go to the tail...  */
      else if (EDGE_COUNT (bb->preds)
               >= EDGE_COUNT (VEC_index (basic_block, cfg_blocks,
                                         cfg_blocks_head)->preds))
        cfg_blocks_tail = ((cfg_blocks_tail + 1)
                           % VEC_length (basic_block, cfg_blocks));
      else
        {
          /* ...and blocks with fewer predecessors are pushed in front
             of the head, wrapping the circular queue if needed.  */
          if (cfg_blocks_head == 0)
            cfg_blocks_head = VEC_length (basic_block, cfg_blocks);
          --cfg_blocks_head;
          head = true;
        }
    }

  VEC_replace (basic_block, cfg_blocks,
               head ? cfg_blocks_head : cfg_blocks_tail,
               bb);
  SET_BIT (bb_in_list, bb->index);
}
229 | ||
230 | ||
231 | /* Remove a block from the worklist. */ | |
232 | ||
233 | static basic_block | |
234 | cfg_blocks_get (void) | |
235 | { | |
236 | basic_block bb; | |
237 | ||
36c88f34 | 238 | bb = VEC_index (basic_block, cfg_blocks, cfg_blocks_head); |
750628d8 | 239 | |
1e128c5f GB |
240 | gcc_assert (!cfg_blocks_empty_p ()); |
241 | gcc_assert (bb); | |
750628d8 | 242 | |
36c88f34 KH |
243 | cfg_blocks_head = ((cfg_blocks_head + 1) |
244 | % VEC_length (basic_block, cfg_blocks)); | |
750628d8 DN |
245 | --cfg_blocks_num; |
246 | RESET_BIT (bb_in_list, bb->index); | |
247 | ||
248 | return bb; | |
249 | } | |
250 | ||
251 | ||
/* We have just defined a new value for VAR.  If IS_VARYING is true,
   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
   them to INTERESTING_SSA_EDGES.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple use_stmt = USE_STMT (use_p);

      /* Only queue statements the client asked to simulate, and only
         queue each statement once: STMT_IN_SSA_EDGE_WORKLIST marks
         statements already sitting on one of the two worklists.  */
      if (prop_simulate_again_p (use_stmt)
          && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
        {
          gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
          if (is_varying)
            VEC_safe_push (gimple, gc, varying_ssa_edges, use_stmt);
          else
            VEC_safe_push (gimple, gc, interesting_ssa_edges, use_stmt);
        }
    }
}
277 | ||
278 | ||
279 | /* Add edge E to the control flow worklist. */ | |
280 | ||
281 | static void | |
282 | add_control_edge (edge e) | |
283 | { | |
284 | basic_block bb = e->dest; | |
285 | if (bb == EXIT_BLOCK_PTR) | |
286 | return; | |
287 | ||
288 | /* If the edge had already been executed, skip it. */ | |
289 | if (e->flags & EDGE_EXECUTABLE) | |
290 | return; | |
291 | ||
292 | e->flags |= EDGE_EXECUTABLE; | |
293 | ||
294 | /* If the block is already in the list, we're done. */ | |
295 | if (TEST_BIT (bb_in_list, bb->index)) | |
296 | return; | |
297 | ||
298 | cfg_blocks_add (bb); | |
299 | ||
300 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
301 | fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n", | |
302 | e->src->index, e->dest->index); | |
303 | } | |
304 | ||
305 | ||
/* Simulate the execution of STMT and update the work lists accordingly.
   Dispatches to the client's SSA_PROP_VISIT_PHI or SSA_PROP_VISIT_STMT
   callback and acts on the returned lattice classification.  */

static void
simulate_stmt (gimple stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (stmt);
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      /* A varying result is final; never simulate STMT again.  */
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
         edges coming out of OUTPUT_NAME.  */
      if (output_name)
        add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
         all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
        {
          edge e;
          edge_iterator ei;
          basic_block bb = gimple_bb (stmt);
          FOR_EACH_EDGE (e, ei, bb->succs)
            add_control_edge (e);
        }
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
         out of OUTPUT_NAME.  */
      if (output_name)
        add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
         add it to the CFG work list.  */
      if (taken_edge)
        add_control_edge (taken_edge);
    }
}
361 | ||
/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.
   We take a pointer to WORKLIST because it may be reallocated when an
   SSA edge is added to it in simulate_stmt.  */

static void
process_ssa_edge_worklist (VEC(gimple,gc) **worklist)
{
  /* Drain the entire worklist.  */
  while (VEC_length (gimple, *worklist) > 0)
    {
      basic_block bb;

      /* Pull the statement to simulate off the worklist.  */
      gimple stmt = VEC_pop (gimple, *worklist);

      /* If this statement was already visited by simulate_block, then
         simulate_block cleared its worklist flag and we don't need to
         visit it again here.  */
      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
        continue;

      /* STMT is no longer in a worklist.  */
      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
        }

      bb = gimple_bb (stmt);

      /* PHI nodes are always visited, regardless of whether or not
         the destination block is executable.  Otherwise, visit the
         statement only if its block is marked executable.  */
      if (gimple_code (stmt) == GIMPLE_PHI
          || TEST_BIT (executable_blocks, bb->index))
        simulate_stmt (stmt);
    }
}
403 | ||
404 | ||
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before, since PHI arguments may have changed.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
        {
          gimple stmt = gsi_stmt (j);

          /* If this statement is already in the worklist then
             "cancel" it.  The reevaluation implied by the worklist
             entry will produce the same value we generate here and
             thus reevaluating it again from the worklist is
             pointless.  */
          if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
            gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

          simulate_stmt (stmt);
        }

      /* We can not predict when abnormal and EH edges will be executed, so
         once a block is considered executable, we consider any
         outgoing abnormal edges as executable.

         TODO: This is not exactly true.  Simplifying statement might
         prove it non-throwing and also computed goto can be handled
         when destination is known.

         At the same time, if this block has only one successor that is
         reached by non-abnormal edges, then add that successor to the
         worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
        {
          if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
            add_control_edge (e);
          else
            {
              normal_edge_count++;
              normal_edge = e;
            }
        }

      if (normal_edge_count == 1)
        add_control_edge (normal_edge);
    }
}
480 | ||
481 | ||
/* Initialize local data structures and work lists.  Marks every edge
   not executable, clears all worklist flags, and seeds the CFG
   worklist from ENTRY's successors.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  interesting_ssa_edges = VEC_alloc (gimple, gc, 20);
  varying_ssa_edges = VEC_alloc (gimple, gc, 20);

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  /* The CFG worklist is a circular queue; pre-grow it to its initial
     capacity (see cfg_blocks_add).  */
  cfg_blocks = VEC_alloc (basic_block, heap, 20);
  VEC_safe_grow (basic_block, heap, cfg_blocks, 20);

  /* Initially assume that every edge in the CFG is not executable.
     (including the edges coming out of ENTRY_BLOCK_PTR).  */
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator si;

      /* Clear the worklist flag on every statement and PHI node.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    add_control_edge (e);
}
528 | ||
529 | ||
530 | /* Free allocated storage. */ | |
531 | ||
532 | static void | |
533 | ssa_prop_fini (void) | |
534 | { | |
726a989a RB |
535 | VEC_free (gimple, gc, interesting_ssa_edges); |
536 | VEC_free (gimple, gc, varying_ssa_edges); | |
36c88f34 | 537 | VEC_free (basic_block, heap, cfg_blocks); |
750628d8 DN |
538 | cfg_blocks = NULL; |
539 | sbitmap_free (bb_in_list); | |
540 | sbitmap_free (executable_blocks); | |
750628d8 DN |
541 | } |
542 | ||
543 | ||
726a989a RB |
544 | /* Return true if EXPR is an acceptable right-hand-side for a |
545 | GIMPLE assignment. We validate the entire tree, not just | |
546 | the root node, thus catching expressions that embed complex | |
547 | operands that are not permitted in GIMPLE. This function | |
548 | is needed because the folding routines in fold-const.c | |
549 | may return such expressions in some cases, e.g., an array | |
550 | access with an embedded index addition. It may make more | |
551 | sense to have folding routines that are sensitive to the | |
552 | constraints on GIMPLE operands, rather than abandoning any | |
553 | any attempt to fold if the usual folding turns out to be too | |
554 | aggressive. */ | |
750628d8 DN |
555 | |
556 | bool | |
726a989a | 557 | valid_gimple_rhs_p (tree expr) |
750628d8 | 558 | { |
750628d8 | 559 | enum tree_code code = TREE_CODE (expr); |
750628d8 | 560 | |
5cdc4a26 | 561 | switch (TREE_CODE_CLASS (code)) |
750628d8 | 562 | { |
5cdc4a26 | 563 | case tcc_declaration: |
c2979eaf | 564 | if (!is_gimple_variable (expr)) |
5cdc4a26 RS |
565 | return false; |
566 | break; | |
567 | ||
568 | case tcc_constant: | |
726a989a | 569 | /* All constants are ok. */ |
5cdc4a26 RS |
570 | break; |
571 | ||
572 | case tcc_binary: | |
573 | case tcc_comparison: | |
750628d8 DN |
574 | if (!is_gimple_val (TREE_OPERAND (expr, 0)) |
575 | || !is_gimple_val (TREE_OPERAND (expr, 1))) | |
576 | return false; | |
5cdc4a26 RS |
577 | break; |
578 | ||
579 | case tcc_unary: | |
750628d8 DN |
580 | if (!is_gimple_val (TREE_OPERAND (expr, 0))) |
581 | return false; | |
5cdc4a26 RS |
582 | break; |
583 | ||
584 | case tcc_expression: | |
585 | switch (code) | |
726a989a RB |
586 | { |
587 | case ADDR_EXPR: | |
588 | { | |
589 | tree t; | |
590 | if (is_gimple_min_invariant (expr)) | |
591 | return true; | |
592 | t = TREE_OPERAND (expr, 0); | |
593 | while (handled_component_p (t)) | |
594 | { | |
595 | /* ??? More checks needed, see the GIMPLE verifier. */ | |
596 | if ((TREE_CODE (t) == ARRAY_REF | |
597 | || TREE_CODE (t) == ARRAY_RANGE_REF) | |
598 | && !is_gimple_val (TREE_OPERAND (t, 1))) | |
599 | return false; | |
600 | t = TREE_OPERAND (t, 0); | |
601 | } | |
602 | if (!is_gimple_id (t)) | |
603 | return false; | |
604 | } | |
605 | break; | |
5cdc4a26 RS |
606 | |
607 | case TRUTH_NOT_EXPR: | |
608 | if (!is_gimple_val (TREE_OPERAND (expr, 0))) | |
609 | return false; | |
610 | break; | |
611 | ||
612 | case TRUTH_AND_EXPR: | |
613 | case TRUTH_XOR_EXPR: | |
614 | case TRUTH_OR_EXPR: | |
615 | if (!is_gimple_val (TREE_OPERAND (expr, 0)) | |
616 | || !is_gimple_val (TREE_OPERAND (expr, 1))) | |
617 | return false; | |
618 | break; | |
619 | ||
5cdc4a26 RS |
620 | default: |
621 | return false; | |
622 | } | |
623 | break; | |
624 | ||
3328fbb7 | 625 | case tcc_vl_exp: |
726a989a | 626 | return false; |
3328fbb7 | 627 | |
5cdc4a26 | 628 | case tcc_exceptional: |
726a989a RB |
629 | if (code != SSA_NAME) |
630 | return false; | |
5cdc4a26 RS |
631 | break; |
632 | ||
633 | default: | |
634 | return false; | |
750628d8 DN |
635 | } |
636 | ||
c2979eaf EB |
637 | return true; |
638 | } | |
639 | ||
640 | ||
726a989a RB |
641 | /* Return true if EXPR is a CALL_EXPR suitable for representation |
642 | as a single GIMPLE_CALL statement. If the arguments require | |
643 | further gimplification, return false. */ | |
c2979eaf EB |
644 | |
645 | bool | |
726a989a | 646 | valid_gimple_call_p (tree expr) |
c2979eaf | 647 | { |
726a989a | 648 | unsigned i, nargs; |
c2979eaf | 649 | |
726a989a | 650 | if (TREE_CODE (expr) != CALL_EXPR) |
c2979eaf EB |
651 | return false; |
652 | ||
726a989a RB |
653 | nargs = call_expr_nargs (expr); |
654 | for (i = 0; i < nargs; i++) | |
655 | if (! is_gimple_operand (CALL_EXPR_ARG (expr, i))) | |
656 | return false; | |
37358746 | 657 | |
726a989a RB |
658 | return true; |
659 | } | |
750628d8 | 660 | |
750628d8 | 661 | |
726a989a RB |
662 | /* Make SSA names defined by OLD_STMT point to NEW_STMT |
663 | as their defining statement. */ | |
750628d8 | 664 | |
726a989a RB |
665 | void |
666 | move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt) | |
667 | { | |
668 | tree var; | |
669 | ssa_op_iter iter; | |
750628d8 | 670 | |
726a989a RB |
671 | if (gimple_in_ssa_p (cfun)) |
672 | { | |
673 | /* Make defined SSA_NAMEs point to the new | |
674 | statement as their definition. */ | |
675 | FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS) | |
676 | { | |
677 | if (TREE_CODE (var) == SSA_NAME) | |
678 | SSA_NAME_DEF_STMT (var) = new_stmt; | |
679 | } | |
750628d8 | 680 | } |
726a989a | 681 | } |
750628d8 | 682 | |
726a989a RB |
683 | |
684 | /* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the | |
685 | value of EXPR, which is expected to be the result of folding the | |
686 | call. This can only be done if EXPR is a CALL_EXPR with valid | |
687 | GIMPLE operands as arguments, or if it is a suitable RHS expression | |
688 | for a GIMPLE_ASSIGN. More complex expressions will require | |
689 | gimplification, which will introduce addtional statements. In this | |
690 | event, no update is performed, and the function returns false. | |
691 | Note that we cannot mutate a GIMPLE_CALL in-place, so we always | |
692 | replace the statement at *SI_P with an entirely new statement. | |
693 | The new statement need not be a call, e.g., if the original call | |
694 | folded to a constant. */ | |
695 | ||
696 | bool | |
697 | update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) | |
698 | { | |
699 | tree lhs; | |
700 | ||
701 | gimple stmt = gsi_stmt (*si_p); | |
702 | ||
703 | gcc_assert (is_gimple_call (stmt)); | |
704 | ||
705 | lhs = gimple_call_lhs (stmt); | |
706 | ||
707 | if (valid_gimple_call_p (expr)) | |
708 | { | |
709 | /* The call has simplified to another call. */ | |
710 | tree fn = CALL_EXPR_FN (expr); | |
711 | unsigned i; | |
712 | unsigned nargs = call_expr_nargs (expr); | |
713 | VEC(tree, heap) *args = NULL; | |
714 | gimple new_stmt; | |
715 | ||
716 | if (nargs > 0) | |
717 | { | |
718 | args = VEC_alloc (tree, heap, nargs); | |
719 | VEC_safe_grow (tree, heap, args, nargs); | |
b8698a0f | 720 | |
726a989a RB |
721 | for (i = 0; i < nargs; i++) |
722 | VEC_replace (tree, args, i, CALL_EXPR_ARG (expr, i)); | |
723 | } | |
724 | ||
725 | new_stmt = gimple_build_call_vec (fn, args); | |
726 | gimple_call_set_lhs (new_stmt, lhs); | |
726a989a | 727 | move_ssa_defining_stmt_for_defs (new_stmt, stmt); |
5006671f RG |
728 | gimple_set_vuse (new_stmt, gimple_vuse (stmt)); |
729 | gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
726a989a RB |
730 | gimple_set_location (new_stmt, gimple_location (stmt)); |
731 | gsi_replace (si_p, new_stmt, false); | |
732 | VEC_free (tree, heap, args); | |
733 | ||
734 | return true; | |
735 | } | |
736 | else if (valid_gimple_rhs_p (expr)) | |
737 | { | |
738 | gimple new_stmt; | |
739 | ||
740 | /* The call has simplified to an expression | |
741 | that cannot be represented as a GIMPLE_CALL. */ | |
742 | if (lhs) | |
743 | { | |
744 | /* A value is expected. | |
745 | Introduce a new GIMPLE_ASSIGN statement. */ | |
746 | STRIP_USELESS_TYPE_CONVERSION (expr); | |
747 | new_stmt = gimple_build_assign (lhs, expr); | |
726a989a | 748 | move_ssa_defining_stmt_for_defs (new_stmt, stmt); |
5006671f RG |
749 | gimple_set_vuse (new_stmt, gimple_vuse (stmt)); |
750 | gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
726a989a RB |
751 | } |
752 | else if (!TREE_SIDE_EFFECTS (expr)) | |
753 | { | |
754 | /* No value is expected, and EXPR has no effect. | |
755 | Replace it with an empty statement. */ | |
756 | new_stmt = gimple_build_nop (); | |
5006671f RG |
757 | unlink_stmt_vdef (stmt); |
758 | release_defs (stmt); | |
726a989a RB |
759 | } |
760 | else | |
761 | { | |
762 | /* No value is expected, but EXPR has an effect, | |
763 | e.g., it could be a reference to a volatile | |
764 | variable. Create an assignment statement | |
765 | with a dummy (unused) lhs variable. */ | |
766 | STRIP_USELESS_TYPE_CONVERSION (expr); | |
767 | lhs = create_tmp_var (TREE_TYPE (expr), NULL); | |
768 | new_stmt = gimple_build_assign (lhs, expr); | |
769 | add_referenced_var (lhs); | |
770 | lhs = make_ssa_name (lhs, new_stmt); | |
771 | gimple_assign_set_lhs (new_stmt, lhs); | |
5006671f RG |
772 | gimple_set_vuse (new_stmt, gimple_vuse (stmt)); |
773 | gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
726a989a RB |
774 | move_ssa_defining_stmt_for_defs (new_stmt, stmt); |
775 | } | |
776 | gimple_set_location (new_stmt, gimple_location (stmt)); | |
777 | gsi_replace (si_p, new_stmt, false); | |
778 | return true; | |
779 | } | |
780 | else | |
781 | /* The call simplified to an expression that is | |
782 | not a valid GIMPLE RHS. */ | |
783 | return false; | |
750628d8 DN |
784 | } |
785 | ||
786 | ||
/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
               ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
         || VEC_length (gimple, interesting_ssa_edges) > 0
         || VEC_length (gimple, varying_ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
        {
          /* Pull the next block to simulate off the worklist.  */
          basic_block dest_block = cfg_blocks_get ();
          simulate_block (dest_block);
        }

      /* In order to move things to varying as quickly as
         possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the INTERESTING_SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&interesting_ssa_edges);
    }

  ssa_prop_fini ();
}
823 | ||
0bca51f0 | 824 | |
0bca51f0 DN |
825 | /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref' |
826 | is a non-volatile pointer dereference, a structure reference or a | |
827 | reference to a single _DECL. Ignore volatile memory references | |
828 | because they are not interesting for the optimizers. */ | |
829 | ||
830 | bool | |
726a989a | 831 | stmt_makes_single_store (gimple stmt) |
0bca51f0 DN |
832 | { |
833 | tree lhs; | |
834 | ||
726a989a RB |
835 | if (gimple_code (stmt) != GIMPLE_ASSIGN |
836 | && gimple_code (stmt) != GIMPLE_CALL) | |
0bca51f0 DN |
837 | return false; |
838 | ||
5006671f | 839 | if (!gimple_vdef (stmt)) |
0bca51f0 DN |
840 | return false; |
841 | ||
726a989a RB |
842 | lhs = gimple_get_lhs (stmt); |
843 | ||
844 | /* A call statement may have a null LHS. */ | |
845 | if (!lhs) | |
846 | return false; | |
0bca51f0 DN |
847 | |
848 | return (!TREE_THIS_VOLATILE (lhs) | |
849 | && (DECL_P (lhs) | |
7da4bf7d | 850 | || REFERENCE_CLASS_P (lhs))); |
0bca51f0 DN |
851 | } |
852 | ||
853 | ||
0bca51f0 DN |
/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;		/* Constants substituted for SSA name uses.  */
  long num_copy_prop;		/* SSA name copies substituted for uses.  */
  long num_stmts_folded;	/* Statements simplified by the pass's FOLD_FN.  */
  long num_dce;			/* Trivially dead statements removed.  */
};

/* Counters accumulated by replace_uses_in, replace_phi_args_in and
   substitute_and_fold, reported via statistics_counter_event.  */
static struct prop_stats_d prop_stats;
864 | ||
865 | /* Replace USE references in statement STMT with the values stored in | |
726a989a | 866 | PROP_VALUE. Return true if at least one reference was replaced. */ |
0bca51f0 | 867 | |
726a989a RB |
868 | static bool |
869 | replace_uses_in (gimple stmt, prop_value_t *prop_value) | |
0bca51f0 DN |
870 | { |
871 | bool replaced = false; | |
872 | use_operand_p use; | |
873 | ssa_op_iter iter; | |
874 | ||
875 | FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE) | |
876 | { | |
877 | tree tuse = USE_FROM_PTR (use); | |
878 | tree val = prop_value[SSA_NAME_VERSION (tuse)].value; | |
879 | ||
880 | if (val == tuse || val == NULL_TREE) | |
881 | continue; | |
882 | ||
726a989a | 883 | if (gimple_code (stmt) == GIMPLE_ASM |
0bca51f0 DN |
884 | && !may_propagate_copy_into_asm (tuse)) |
885 | continue; | |
886 | ||
887 | if (!may_propagate_copy (tuse, val)) | |
888 | continue; | |
889 | ||
890 | if (TREE_CODE (val) != SSA_NAME) | |
891 | prop_stats.num_const_prop++; | |
892 | else | |
893 | prop_stats.num_copy_prop++; | |
894 | ||
895 | propagate_value (use, val); | |
896 | ||
897 | replaced = true; | |
0bca51f0 DN |
898 | } |
899 | ||
900 | return replaced; | |
901 | } | |
902 | ||
903 | ||
0bca51f0 DN |
904 | /* Replace propagated values into all the arguments for PHI using the |
905 | values from PROP_VALUE. */ | |
906 | ||
907 | static void | |
726a989a | 908 | replace_phi_args_in (gimple phi, prop_value_t *prop_value) |
0bca51f0 | 909 | { |
726a989a | 910 | size_t i; |
227858d1 | 911 | bool replaced = false; |
227858d1 DN |
912 | |
913 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
726a989a RB |
914 | { |
915 | fprintf (dump_file, "Folding PHI node: "); | |
916 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); | |
917 | } | |
0bca51f0 | 918 | |
726a989a | 919 | for (i = 0; i < gimple_phi_num_args (phi); i++) |
0bca51f0 | 920 | { |
726a989a | 921 | tree arg = gimple_phi_arg_def (phi, i); |
0bca51f0 DN |
922 | |
923 | if (TREE_CODE (arg) == SSA_NAME) | |
924 | { | |
925 | tree val = prop_value[SSA_NAME_VERSION (arg)].value; | |
926 | ||
927 | if (val && val != arg && may_propagate_copy (arg, val)) | |
928 | { | |
929 | if (TREE_CODE (val) != SSA_NAME) | |
930 | prop_stats.num_const_prop++; | |
931 | else | |
932 | prop_stats.num_copy_prop++; | |
933 | ||
934 | propagate_value (PHI_ARG_DEF_PTR (phi, i), val); | |
227858d1 | 935 | replaced = true; |
0bca51f0 DN |
936 | |
937 | /* If we propagated a copy and this argument flows | |
938 | through an abnormal edge, update the replacement | |
939 | accordingly. */ | |
940 | if (TREE_CODE (val) == SSA_NAME | |
726a989a | 941 | && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL) |
0bca51f0 DN |
942 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1; |
943 | } | |
944 | } | |
945 | } | |
b8698a0f | 946 | |
726a989a | 947 | if (dump_file && (dump_flags & TDF_DETAILS)) |
227858d1 | 948 | { |
726a989a RB |
949 | if (!replaced) |
950 | fprintf (dump_file, "No folding possible\n"); | |
951 | else | |
952 | { | |
953 | fprintf (dump_file, "Folded into: "); | |
954 | print_gimple_stmt (dump_file, phi, 0, TDF_SLIM); | |
955 | fprintf (dump_file, "\n"); | |
956 | } | |
227858d1 DN |
957 | } |
958 | } | |
959 | ||
960 | ||
227858d1 DN |
/* Perform final substitution and folding of propagated values.

   PROP_VALUE[I] contains the single value that should be substituted
   at every use of SSA name N_I.  If PROP_VALUE is NULL, no values are
   substituted.

   If FOLD_FN is non-NULL the function will be invoked on all statements
   before propagating values for pass specific simplification.

   Return TRUE when something changed.  */

bool
substitute_and_fold (prop_value_t *prop_value, ssa_prop_fold_stmt_fn fold_fn)
{
  basic_block bb;
  bool something_changed = false;

  /* With neither a lattice nor a fold callback there is nothing to do.  */
  if (prop_value == NULL && !fold_fn)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  /* Substitute values in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* Propagate known values into PHI nodes.  */
      if (prop_value)
	for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	  replace_phi_args_in (gsi_stmt (i), prop_value);

      /* Propagate known values into stmts.  Do a backward walk to expose
	 more trivially deletable stmts.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  bool did_replace;
	  gimple stmt = gsi_stmt (i);
	  gimple old_stmt;
	  enum gimple_code code = gimple_code (stmt);
	  gimple_stmt_iterator oldi;

	  /* Remember the iterator at STMT and step the walk iterator
	     backward now, since STMT may be removed or replaced below.  */
	  oldi = i;
	  gsi_prev (&i);

	  /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	     range information for names and they are discarded
	     afterwards.  */

	  if (code == GIMPLE_ASSIGN
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	    continue;

	  /* No point propagating into a stmt whose result is not used,
	     but instead we might be able to remove a trivially dead stmt.  */
	  if (gimple_get_lhs (stmt)
	      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
	      && has_zero_uses (gimple_get_lhs (stmt))
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      gimple_stmt_iterator i2;

	      if (dump_file && dump_flags & TDF_DETAILS)
		{
		  fprintf (dump_file, "Removing dead stmt ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		  fprintf (dump_file, "\n");
		}
	      prop_stats.num_dce++;
	      /* Remove via a fresh iterator; I already points to the
		 previous statement.  */
	      i2 = gsi_for_stmt (stmt);
	      gsi_remove (&i2, true);
	      release_defs (stmt);
	      continue;
	    }

	  /* Replace the statement with its folded version and mark it
	     folded.  */
	  did_replace = false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Folding statement: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	    }

	  /* Keep the pre-fold statement for EH comparison below.  */
	  old_stmt = stmt;

	  /* Some statements may be simplified using propagator
	     specific information.  Do this before propagating
	     into the stmt to not disturb pass specific information.  */
	  if (fold_fn
	      && (*fold_fn)(&oldi))
	    {
	      did_replace = true;
	      prop_stats.num_stmts_folded++;
	    }

	  /* Only replace real uses if we couldn't fold the
	     statement using value range information.  */
	  if (prop_value
	      && !did_replace)
	    did_replace |= replace_uses_in (stmt, prop_value);

	  /* If we made a replacement, fold the statement.  */
	  if (did_replace)
	    fold_stmt (&oldi);

	  /* Now cleanup.  */
	  if (did_replace)
	    {
	      /* FOLD_FN and fold_stmt may have replaced the statement;
		 refetch it from the iterator.  */
	      stmt = gsi_stmt (oldi);

	      /* If we cleaned up EH information from the statement,
		 remove EH edges.  */
	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
		gimple_purge_dead_eh_edges (bb);

	      if (is_gimple_assign (stmt)
		  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);

		  /* Substitution may have changed what the ADDR_EXPR
		     takes the address of; refresh its invariant flag.  */
		  if (TREE_CODE (rhs) == ADDR_EXPR)
		    recompute_tree_invariant_for_addr_expr (rhs);
		}

	      /* Determine what needs to be done to update the SSA form.  */
	      update_stmt (stmt);
	      /* Changes to debug statements alone are not reported as
		 IL changes.  */
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      if (did_replace)
		{
		  fprintf (dump_file, "Folded into: ");
		  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      else
		fprintf (dump_file, "Not folded\n");
	    }
	}
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);
  return something_changed;
}
227858d1 | 1121 | |
750628d8 | 1122 | #include "gt-tree-ssa-propagate.h" |