]>
Commit | Line | Data |
---|---|---|
750628d8 | 1 | /* Generic SSA value propagation engine. |
8d9254fc | 2 | Copyright (C) 2004-2020 Free Software Foundation, Inc. |
750628d8 DN |
3 | Contributed by Diego Novillo <dnovillo@redhat.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it | |
8 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 9 | Free Software Foundation; either version 3, or (at your option) any |
750628d8 DN |
10 | later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
13 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
750628d8 DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
c7131fb2 | 24 | #include "backend.h" |
750628d8 | 25 | #include "tree.h" |
c7131fb2 | 26 | #include "gimple.h" |
c7131fb2 | 27 | #include "ssa.h" |
957060b5 | 28 | #include "gimple-pretty-print.h" |
7ee2468b | 29 | #include "dumpfile.h" |
2fb9a547 AM |
30 | #include "gimple-fold.h" |
31 | #include "tree-eh.h" | |
45b0be94 | 32 | #include "gimplify.h" |
5be5c238 | 33 | #include "gimple-iterator.h" |
442b4905 | 34 | #include "tree-cfg.h" |
7a300452 | 35 | #include "tree-ssa.h" |
750628d8 | 36 | #include "tree-ssa-propagate.h" |
ec18e2eb | 37 | #include "domwalk.h" |
31935398 | 38 | #include "cfgloop.h" |
2a5671ee | 39 | #include "tree-cfgcleanup.h" |
663eecfd | 40 | #include "cfganal.h" |
750628d8 DN |
41 | |
42 | /* This file implements a generic value propagation engine based on | |
43 | the same propagation used by the SSA-CCP algorithm [1]. | |
44 | ||
45 | Propagation is performed by simulating the execution of every | |
46 | statement that produces the value being propagated. Simulation | |
47 | proceeds as follows: | |
48 | ||
49 | 1- Initially, all edges of the CFG are marked not executable and | |
766ff1b1 | 50 | the CFG worklist is seeded with all the statements in the entry |
750628d8 DN |
51 | basic block (block 0). |
52 | ||
53 | 2- Every statement S is simulated with a call to the call-back | |
54 | function SSA_PROP_VISIT_STMT. This evaluation may produce 3 | |
55 | results: | |
56 | ||
57 | SSA_PROP_NOT_INTERESTING: Statement S produces nothing of | |
58 | interest and does not affect any of the work lists. | |
e0144c7b AH |
59 | The statement may be simulated again if any of its input |
60 | operands change in future iterations of the simulator. | |
750628d8 DN |
61 | |
62 | SSA_PROP_VARYING: The value produced by S cannot be determined | |
63 | at compile time. Further simulation of S is not required. | |
64 | If S is a conditional jump, all the outgoing edges for the | |
65 | block are considered executable and added to the work | |
66 | list. | |
67 | ||
68 | SSA_PROP_INTERESTING: S produces a value that can be computed | |
69 | at compile time. Its result can be propagated into the | |
2a7e31df | 70 | statements that feed from S. Furthermore, if S is a |
750628d8 DN |
71 | conditional jump, only the edge known to be taken is added |
72 | to the work list. Edges that are known not to execute are | |
73 | never simulated. | |
74 | ||
75 | 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The | |
76 | return value from SSA_PROP_VISIT_PHI has the same semantics as | |
766ff1b1 | 77 | described in #2. |
750628d8 DN |
78 | |
   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.
82 | ||
83 | CFG_BLOCKS contains the list of blocks to be simulated. | |
84 | Blocks are added to this list if their incoming edges are | |
85 | found executable. | |
86 | ||
663eecfd RB |
87 | SSA_EDGE_WORKLIST contains the list of statements that we |
88 | need to revisit. | |
750628d8 DN |
89 | |
   5- Simulation terminates when all work lists are drained.
91 | ||
92 | Before calling ssa_propagate, it is important to clear | |
726a989a | 93 | prop_simulate_again_p for all the statements in the program that |
750628d8 DN |
94 | should be simulated. This initialization allows an implementation |
95 | to specify which statements should never be simulated. | |
96 | ||
97 | It is also important to compute def-use information before calling | |
98 | ssa_propagate. | |
99 | ||
100 | References: | |
101 | ||
102 | [1] Constant propagation with conditional branches, | |
103 | Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | |
104 | ||
105 | [2] Building an Optimizing Compiler, | |
106 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
107 | ||
108 | [3] Advanced Compiler Design and Implementation, | |
109 | Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */ | |
110 | ||
f48bd5e4 | 111 | /* Worklists of control flow edge destinations. This contains |
663eecfd | 112 | the CFG order number of the blocks so we can iterate in CFG |
f48bd5e4 RB |
113 | order by visiting in bit-order. We use two worklists to |
114 | first make forward progress before iterating. */ | |
663eecfd | 115 | static bitmap cfg_blocks; |
f48bd5e4 | 116 | static bitmap cfg_blocks_back; |
663eecfd RB |
117 | static int *bb_to_cfg_order; |
118 | static int *cfg_order_to_bb; | |
750628d8 | 119 | |
f48bd5e4 | 120 | /* Worklists of SSA edges which will need reexamination as their |
750628d8 DN |
121 | definition has changed. SSA edges are def-use edges in the SSA |
122 | web. For each D-U edge, we store the target statement or PHI node | |
f48bd5e4 RB |
123 | UID in a bitmap. UIDs order stmts in execution order. We use |
124 | two worklists to first make forward progress before iterating. */ | |
663eecfd | 125 | static bitmap ssa_edge_worklist; |
f48bd5e4 | 126 | static bitmap ssa_edge_worklist_back; |
663eecfd | 127 | static vec<gimple *> uid_to_stmt; |
750628d8 | 128 | |
f48bd5e4 RB |
129 | /* Current RPO index in the iteration. */ |
130 | static int curr_order; | |
750628d8 DN |
131 | |
132 | ||
/* We have just defined a new value for VAR.  Add the immediate uses
   of VAR to the SSA edge worklist so the statements that use VAR get
   (re-)simulated.  Uses in not-yet-visited blocks, or on PHI args
   arriving over not-yet-executable edges, are skipped — they will be
   picked up when their block/edge first becomes executable.  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Statements no longer marked for simulation are final.  */
      if (!prop_simulate_again_p (use_stmt))
	continue;

      /* If we did not yet simulate the block wait for this to happen
         and do not add the stmt to the SSA edge worklist.  */
      basic_block use_bb = gimple_bb (use_stmt);
      if (! (use_bb->flags & BB_VISITED))
	continue;

      /* If this is a use on a not yet executable edge do not bother to
	 queue it.  */
      if (gimple_code (use_stmt) == GIMPLE_PHI
	  && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
	       & EDGE_EXECUTABLE))
	continue;

      /* Queue on the backedge worklist if the use comes before the
	 current RPO position, so the current sweep finishes first.  */
      bitmap worklist;
      if (bb_to_cfg_order[gimple_bb (use_stmt)->index] < curr_order)
	worklist = ssa_edge_worklist_back;
      else
	worklist = ssa_edge_worklist;
      /* bitmap_set_bit returns true only on a 0->1 transition, so the
	 uid_to_stmt mapping and the dump happen once per queued stmt.  */
      if (bitmap_set_bit (worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}
178 | ||
179 | ||
180 | /* Add edge E to the control flow worklist. */ | |
181 | ||
182 | static void | |
183 | add_control_edge (edge e) | |
184 | { | |
185 | basic_block bb = e->dest; | |
fefa31b5 | 186 | if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
750628d8 DN |
187 | return; |
188 | ||
189 | /* If the edge had already been executed, skip it. */ | |
190 | if (e->flags & EDGE_EXECUTABLE) | |
191 | return; | |
192 | ||
193 | e->flags |= EDGE_EXECUTABLE; | |
194 | ||
f48bd5e4 RB |
195 | int bb_order = bb_to_cfg_order[bb->index]; |
196 | if (bb_order < curr_order) | |
197 | bitmap_set_bit (cfg_blocks_back, bb_order); | |
198 | else | |
199 | bitmap_set_bit (cfg_blocks, bb_order); | |
750628d8 DN |
200 | |
201 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
970bb2de | 202 | fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n", |
750628d8 DN |
203 | e->src->index, e->dest->index); |
204 | } | |
205 | ||
206 | ||
/* Simulate the execution of STMT and update the work lists accordingly.  */

void
ssa_propagation_engine::simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  /* Dispatch to the virtual visitor appropriate for the stmt kind.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      /* VARYING is final in the lattice: never simulate STMT again.  */
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will be never visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      /* For PHIs, a not-yet-executable incoming edge also counts:
	 the argument on it may still change the PHI's value later.  */
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }
  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}
305 | ||
750628d8 DN |
306 | |
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

void
ssa_propagation_engine::simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  Their value can change with each newly executable
     incoming edge.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying statement might
	 prove it non-throwing and also computed goto can be handled
	 when destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
370 | ||
371 | ||
/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  Tree view because the worklists are
     queried by first-set-bit, i.e. in stmt-UID order.  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);
  ssa_edge_worklist_back = BITMAP_ALLOC (NULL);
  bitmap_tree_view (ssa_edge_worklist);
  bitmap_tree_view (ssa_edge_worklist_back);

  /* Worklist of basic-blocks.  Compute a reverse-post-order numbering
     of the CFG so block worklists can be iterated in RPO.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);
  cfg_blocks_back = BITMAP_ALLOC (NULL);

  /* Initially assume that every edge in the CFG is not executable.
     (including the edges coming out of the entry block).  Mark blocks
     as not visited, blocks not yet visited will have all their statements
     simulated once an incoming edge gets executable.
     Stmt UIDs are assigned while walking blocks in RPO so they reflect
     execution order, which the SSA edge worklist bitmaps rely on.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun));
}
425 | ||
426 | ||
427 | /* Free allocated storage. */ | |
428 | ||
429 | static void | |
430 | ssa_prop_fini (void) | |
431 | { | |
663eecfd | 432 | BITMAP_FREE (cfg_blocks); |
f48bd5e4 | 433 | BITMAP_FREE (cfg_blocks_back); |
663eecfd RB |
434 | free (bb_to_cfg_order); |
435 | free (cfg_order_to_bb); | |
436 | BITMAP_FREE (ssa_edge_worklist); | |
f48bd5e4 | 437 | BITMAP_FREE (ssa_edge_worklist_back); |
663eecfd | 438 | uid_to_stmt.release (); |
750628d8 DN |
439 | } |
440 | ||
441 | ||
726a989a RB |
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
         those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary RHS must be gimple values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the reference chain checking that any array
	       indices are gimple values, down to the base decl/id.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  /* Ternary RHSs are valid when every operand is a gimple
	     value (the first operand of a [VEC_]COND_EXPR may be a
	     gimple condition instead).  */
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      /* Calls etc. are never a valid assignment RHS here.  */
      return false;

    case tcc_exceptional:
      /* Of the exceptional codes only CONSTRUCTORs of gimple values
	 and SSA names are acceptable.  */
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      /* BIT_FIELD_REF of a gimple value is the only valid reference.  */
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
558 | ||
559 | ||
726a989a RB |
560 | /* Return true if EXPR is a CALL_EXPR suitable for representation |
561 | as a single GIMPLE_CALL statement. If the arguments require | |
562 | further gimplification, return false. */ | |
c2979eaf | 563 | |
523968bf | 564 | static bool |
726a989a | 565 | valid_gimple_call_p (tree expr) |
c2979eaf | 566 | { |
726a989a | 567 | unsigned i, nargs; |
c2979eaf | 568 | |
726a989a | 569 | if (TREE_CODE (expr) != CALL_EXPR) |
c2979eaf EB |
570 | return false; |
571 | ||
726a989a RB |
572 | nargs = call_expr_nargs (expr); |
573 | for (i = 0; i < nargs; i++) | |
523968bf RG |
574 | { |
575 | tree arg = CALL_EXPR_ARG (expr, i); | |
0c6b087c | 576 | if (is_gimple_reg_type (TREE_TYPE (arg))) |
523968bf RG |
577 | { |
578 | if (!is_gimple_val (arg)) | |
579 | return false; | |
580 | } | |
581 | else | |
582 | if (!is_gimple_lvalue (arg)) | |
583 | return false; | |
584 | } | |
37358746 | 585 | |
726a989a RB |
586 | return true; |
587 | } | |
750628d8 | 588 | |
750628d8 | 589 | |
726a989a RB |
590 | /* Make SSA names defined by OLD_STMT point to NEW_STMT |
591 | as their defining statement. */ | |
750628d8 | 592 | |
726a989a | 593 | void |
355fe088 | 594 | move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt) |
726a989a RB |
595 | { |
596 | tree var; | |
597 | ssa_op_iter iter; | |
750628d8 | 598 | |
726a989a RB |
599 | if (gimple_in_ssa_p (cfun)) |
600 | { | |
601 | /* Make defined SSA_NAMEs point to the new | |
602 | statement as their definition. */ | |
603 | FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS) | |
604 | { | |
605 | if (TREE_CODE (var) == SSA_NAME) | |
606 | SSA_NAME_DEF_STMT (var) = new_stmt; | |
607 | } | |
750628d8 | 608 | } |
726a989a | 609 | } |
750628d8 | 610 | |
21860814 JJ |
/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  /* Carry the LHS, SSA def links, virtual operands, location and
     block over from the old call, then swap the statements.  */
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
626 | ||
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
726a989a RB |
644 | |
/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_move_vops (new_stmt, stmt);
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_move_vops (new_stmt, stmt);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
737 | ||
750628d8 DN |
/* Entry point to the propagation engine.

   The VISIT_STMT virtual function is called for every statement
   visited and the VISIT_PHI virtual function is called for every PHI
   node visited.  */

void
ssa_propagation_engine::ssa_propagate (void)
{
  ssa_prop_init ();

  curr_order = 0;

  /* Iterate until the worklists are empty.  We iterate both blocks
     and stmts in RPO order, using sets of two worklists to first
     complete the current iteration before iterating over backedges.
     Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
  while (1)
    {
      /* Peek the lowest-RPO pending block and lowest-UID pending stmt;
	 -1 means the respective worklist is empty.  */
      int next_block_order = (bitmap_empty_p (cfg_blocks)
			      ? -1 : bitmap_first_set_bit (cfg_blocks));
      int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
			   ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
      if (next_block_order == -1 && next_stmt_uid == -1)
	{
	  /* Both forward worklists drained; terminate unless work was
	     queued for backedge destinations, in which case swap the
	     worklist pairs and start another RPO sweep.  */
	  if (bitmap_empty_p (cfg_blocks_back)
	      && bitmap_empty_p (ssa_edge_worklist_back))
	    break;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Regular worklists empty, now processing "
		     "backedge destinations\n");
	  std::swap (cfg_blocks, cfg_blocks_back);
	  std::swap (ssa_edge_worklist, ssa_edge_worklist_back);
	  continue;
	}

      int next_stmt_bb_order = -1;
      gimple *next_stmt = NULL;
      if (next_stmt_uid != -1)
	{
	  next_stmt = uid_to_stmt[next_stmt_uid];
	  next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
	}

      /* Pull the next block to simulate off the worklist if it comes first.  */
      if (next_block_order != -1
	  && (next_stmt_bb_order == -1
	      || next_block_order <= next_stmt_bb_order))
	{
	  curr_order = next_block_order;
	  bitmap_clear_bit (cfg_blocks, next_block_order);
	  basic_block bb
	    = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
	  simulate_block (bb);
	}
      /* Else simulate from the SSA edge worklist.  */
      else
	{
	  curr_order = next_stmt_bb_order;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nSimulating statement: ");
	      print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
	    }
	  simulate_stmt (next_stmt);
	}
    }

  ssa_prop_fini ();
}
817 | ||
0bca51f0 DN |
818 | /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref' |
819 | is a non-volatile pointer dereference, a structure reference or a | |
820 | reference to a single _DECL. Ignore volatile memory references | |
821 | because they are not interesting for the optimizers. */ | |
822 | ||
823 | bool | |
355fe088 | 824 | stmt_makes_single_store (gimple *stmt) |
0bca51f0 DN |
825 | { |
826 | tree lhs; | |
827 | ||
726a989a RB |
828 | if (gimple_code (stmt) != GIMPLE_ASSIGN |
829 | && gimple_code (stmt) != GIMPLE_CALL) | |
0bca51f0 DN |
830 | return false; |
831 | ||
5006671f | 832 | if (!gimple_vdef (stmt)) |
0bca51f0 DN |
833 | return false; |
834 | ||
726a989a RB |
835 | lhs = gimple_get_lhs (stmt); |
836 | ||
837 | /* A call statement may have a null LHS. */ | |
838 | if (!lhs) | |
839 | return false; | |
0bca51f0 DN |
840 | |
841 | return (!TREE_THIS_VOLATILE (lhs) | |
842 | && (DECL_P (lhs) | |
7da4bf7d | 843 | || REFERENCE_CLASS_P (lhs))); |
0bca51f0 DN |
844 | } |
845 | ||
846 | ||
/* Propagation statistics, reported via statistics_counter_event at the
   end of substitute_and_fold.  */
struct prop_stats_d
{
  /* Number of constant values propagated into uses.  */
  long num_const_prop;
  /* Number of SSA-name copies propagated into uses.  */
  long num_copy_prop;
  /* Number of statements simplified by the pass-specific fold_stmt hook.  */
  long num_stmts_folded;
  /* Number of trivially dead statements/PHIs removed.  */
  long num_dce;
};

static struct prop_stats_d prop_stats;
857 | ||
/* Replace USE references in statement STMT with the values returned by
   the engine's get_value hook.  Return true if at least one reference
   was replaced.  */

bool
substitute_and_fold_engine::replace_uses_in (gimple *stmt)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = get_value (tuse);

      /* Nothing to do when the lattice has no better value for this use.  */
      if (val == tuse || val == NULL_TREE)
	continue;

      /* Propagating into ASM operands is restricted.  */
      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      /* Book-keeping: constants vs. SSA-name copies.  */
      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}
895 | ||
896 | ||
/* Replace propagated values into all the arguments for PHI using the
   values returned by the engine's get_value hook.  Return true if at
   least one argument was replaced.  */

bool
substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = get_value (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}
963 | ||
964 | ||
/* Dominator walker that performs the per-block work of
   substitute_and_fold_engine::substitute_and_fold.  Statements cannot
   be removed during the walk (that would destroy the callers' lattice),
   so deletions and noreturn fixups are queued and processed afterwards.  */

class substitute_and_fold_dom_walker : public dom_walker
{
public:
  substitute_and_fold_dom_walker (cdi_direction direction,
				  class substitute_and_fold_engine *engine)
    : dom_walker (direction),
      something_changed (false),
      substitute_and_fold_engine (engine)
  {
    stmts_to_remove.create (0);
    stmts_to_fixup.create (0);
    need_eh_cleanup = BITMAP_ALLOC (NULL);
  }
  ~substitute_and_fold_dom_walker ()
  {
    stmts_to_remove.release ();
    stmts_to_fixup.release ();
    BITMAP_FREE (need_eh_cleanup);
  }

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block) {}

  /* Set when a non-debug statement was changed by the walk.  */
  bool something_changed;
  /* Statements/PHIs fully subsumed by a lattice value, removed after
     the walk.  */
  vec<gimple *> stmts_to_remove;
  /* Calls that became noreturn during folding; fixed up after the walk
     since that may split blocks.  */
  vec<gimple *> stmts_to_fixup;
  /* Indices of blocks whose EH edges may need purging.  */
  bitmap need_eh_cleanup;

  /* The engine supplying get_value/fold_stmt callbacks.  */
  class substitute_and_fold_engine *substitute_and_fold_engine;
};
0bca51f0 | 995 | |
/* Visit basic block BB: propagate lattice values into its PHIs and
   statements, fold what changed, and queue statements that became
   removable or need later fixup.  Always returns NULL (no edge taken).  */

edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->get_value (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      /* The whole PHI is subsumed by SPRIME; queue it for
		 removal instead of folding its arguments.  */
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
    }

  /* Propagate known values into stmts.  In some case it exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);

      /* No point propagating into a stmt we have a value for we
	 can propagate into all uses.  Mark it for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->get_value (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (cfun, stmt)
	      && !gimple_has_side_effects (stmt)
	      /* We have to leave ASSERT_EXPRs around for jump-threading.  */
	      && (!is_gimple_assign (stmt)
		  || gimple_assign_rhs_code (stmt) != ASSERT_EXPR))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple *old_stmt = stmt;
      /* Remember the noreturn state so we can detect calls that become
	 noreturn through folding below.  */
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Replace real uses in the statement.  */
      did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}
      /* Also fold if we want to fold all statements.  */
      else if (substitute_and_fold_engine->fold_all_stmts
	       && fold_stmt (&i, follow_single_use_edges))
	{
	  did_replace = true;
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      update_stmt_if_modified (stmt);
      if (substitute_and_fold_engine->fold_stmt (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* If this is a control statement the propagator left edges
	 unexecuted on force the condition in a way consistent with
	 that.  See PR66945 for cases where the propagator can end
	 up with a different idea of a taken edge than folding
	 (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  /* Exactly one of the two successor edges is executable.  */
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      /* Force the condition to select the executable edge.  */
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      gimple_set_modified (stmt, true);
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a not noreturn call into a noreturn one
	     schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      /* Propagation may have invalidated TREE_INVARIANT on
		 an ADDR_EXPR rhs; recompute it.  */
	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt_if_modified (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
  return NULL;
}
227858d1 | 1158 | |
0bca51f0 | 1159 | |
30821654 | 1160 | |
/* Perform final substitution and folding of propagated values.
   Process the whole function if BLOCK is null, otherwise only
   process the blocks that BLOCK dominates.  In the latter case,
   it is the caller's responsibility to ensure that dominator
   information is available and up-to-date.

   Each SSA name use is replaced by the value returned from this
   engine's get_value hook, and modified statements are folded; the
   engine's fold_stmt hook is also invoked for pass-specific
   simplifications.  Statements and PHIs whose result is fully
   subsumed by a lattice value are removed, dead EH edges are purged,
   and calls that became noreturn are fixed up.

   Return TRUE when something changed.  */

bool
substitute_and_fold_engine::substitute_and_fold (basic_block block)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  /* Don't call calculate_dominance_info when iterating over a subgraph.
     Callers that are using the interface this way are likely to want to
     iterate over several disjoint subgraphs, and it would be expensive
     in enable-checking builds to revalidate the whole dominance tree
     each time.  */
  if (block)
    gcc_assert (dom_info_state (CDI_DOMINATORS));
  else
    calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
  walker.walk (block ? block : ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple *stmt = walker.stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertedly remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}
227858d1 | 1255 | |
744730a4 AM |
1256 | |
1257 | /* Return true if we may propagate ORIG into DEST, false otherwise. */ | |
1258 | ||
1259 | bool | |
1260 | may_propagate_copy (tree dest, tree orig) | |
1261 | { | |
1262 | tree type_d = TREE_TYPE (dest); | |
1263 | tree type_o = TREE_TYPE (orig); | |
1264 | ||
c7488fba PP |
1265 | /* If ORIG is a default definition which flows in from an abnormal edge |
1266 | then the copy can be propagated. It is important that we do so to avoid | |
1267 | uninitialized copies. */ | |
744730a4 AM |
1268 | if (TREE_CODE (orig) == SSA_NAME |
1269 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig) | |
c7488fba PP |
1270 | && SSA_NAME_IS_DEFAULT_DEF (orig) |
1271 | && (SSA_NAME_VAR (orig) == NULL_TREE | |
1272 | || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL)) | |
1273 | ; | |
1274 | /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot | |
1275 | be propagated. */ | |
1276 | else if (TREE_CODE (orig) == SSA_NAME | |
1277 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)) | |
744730a4 | 1278 | return false; |
c7488fba PP |
1279 | /* Similarly if DEST flows in from an abnormal edge then the copy cannot be |
1280 | propagated. */ | |
1281 | else if (TREE_CODE (dest) == SSA_NAME | |
1282 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest)) | |
744730a4 AM |
1283 | return false; |
1284 | ||
1285 | /* Do not copy between types for which we *do* need a conversion. */ | |
1286 | if (!useless_type_conversion_p (type_d, type_o)) | |
1287 | return false; | |
1288 | ||
1289 | /* Generally propagating virtual operands is not ok as that may | |
1290 | create overlapping life-ranges. */ | |
1291 | if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest)) | |
1292 | return false; | |
1293 | ||
1294 | /* Anything else is OK. */ | |
1295 | return true; | |
1296 | } | |
1297 | ||
/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  /* Determine the type the replacement expression must convert to,
     depending on the kind of destination statement.  */
  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
	   && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}
1345 | ||
1346 | /* Similarly, but we know that we're propagating into an ASM_EXPR. */ | |
1347 | ||
1348 | bool | |
1349 | may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED) | |
1350 | { | |
1351 | return true; | |
1352 | } | |
1353 | ||
1354 | ||
1355 | /* Common code for propagate_value and replace_exp. | |
1356 | ||
1357 | Replace use operand OP_P with VAL. FOR_PROPAGATION indicates if the | |
1358 | replacement is done to propagate a value or not. */ | |
1359 | ||
1360 | static void | |
1361 | replace_exp_1 (use_operand_p op_p, tree val, | |
1362 | bool for_propagation ATTRIBUTE_UNUSED) | |
1363 | { | |
b2b29377 MM |
1364 | if (flag_checking) |
1365 | { | |
1366 | tree op = USE_FROM_PTR (op_p); | |
1367 | gcc_assert (!(for_propagation | |
1368 | && TREE_CODE (op) == SSA_NAME | |
1369 | && TREE_CODE (val) == SSA_NAME | |
1370 | && !may_propagate_copy (op, val))); | |
1371 | } | |
744730a4 AM |
1372 | |
1373 | if (TREE_CODE (val) == SSA_NAME) | |
1374 | SET_USE (op_p, val); | |
1375 | else | |
1376 | SET_USE (op_p, unshare_expr (val)); | |
1377 | } | |
1378 | ||
1379 | ||
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  /* FOR_PROPAGATION is true: checking builds assert the copy is valid.  */
  replace_exp_1 (op_p, val, true);
}
1391 | ||
/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  /* FOR_PROPAGATION is false: skip the propagation-validity assert.  */
  replace_exp_1 (op_p, val, false);
}
1406 | ||
1407 | ||
1408 | /* Propagate the value VAL (assumed to be a constant or another SSA_NAME) | |
1409 | into the tree pointed to by OP_P. | |
1410 | ||
1411 | Use this version for const/copy propagation when SSA operands are not | |
1412 | available. It will perform the additional checks to ensure validity of | |
1413 | the const/copy propagation, but will not update any operand information. | |
1414 | Be sure to mark the stmt as modified. */ | |
1415 | ||
1416 | void | |
1417 | propagate_tree_value (tree *op_p, tree val) | |
1418 | { | |
744730a4 AM |
1419 | if (TREE_CODE (val) == SSA_NAME) |
1420 | *op_p = val; | |
1421 | else | |
1422 | *op_p = unshare_expr (val); | |
1423 | } | |
1424 | ||
1425 | ||
/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      /* Replace the RHS of a single-rhs assignment; otherwise build
	 the RHS from VAL from scratch.  */
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* Rewrite the condition as VAL != 0.  */
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      /* Replace the whole call with VAL; this may replace the
	 statement rather than update it in-place.  */
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}