]>
Commit | Line | Data |
---|---|---|
67514449 | 1 | /* Backward propagation of indirect loads through PHIs. |
8d9254fc | 2 | Copyright (C) 2007-2020 Free Software Foundation, Inc. |
67514449 RG |
3 | Contributed by Richard Guenther <rguenther@suse.de> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 3, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
c7131fb2 | 24 | #include "backend.h" |
67514449 | 25 | #include "tree.h" |
c7131fb2 | 26 | #include "gimple.h" |
957060b5 | 27 | #include "tree-pass.h" |
c7131fb2 | 28 | #include "ssa.h" |
957060b5 | 29 | #include "gimple-pretty-print.h" |
40e23961 | 30 | #include "fold-const.h" |
2fb9a547 | 31 | #include "tree-eh.h" |
45b0be94 | 32 | #include "gimplify.h" |
5be5c238 | 33 | #include "gimple-iterator.h" |
ea8927ea | 34 | #include "stor-layout.h" |
4a3255dd | 35 | #include "tree-ssa-loop.h" |
67514449 RG |
36 | |
37 | /* This pass propagates indirect loads through the PHI node for its | |
fa10beec | 38 | address to make the load source possibly non-addressable and to |
67514449 RG |
39 | allow for PHI optimization to trigger. |
40 | ||
41 | For example the pass changes | |
42 | ||
43 | # addr_1 = PHI <&a, &b> | |
44 | tmp_1 = *addr_1; | |
45 | ||
46 | to | |
47 | ||
48 | # tmp_1 = PHI <a, b> | |
49 | ||
fa10beec | 50 | but also handles more complex scenarios like |
67514449 RG |
51 | |
52 | D.2077_2 = &this_1(D)->a1; | |
53 | ... | |
54 | ||
55 | # b_12 = PHI <&c(2), D.2077_2(3)> | |
56 | D.2114_13 = *b_12; | |
57 | ... | |
58 | ||
59 | # b_15 = PHI <b_12(4), &b(5)> | |
60 | D.2080_5 = &this_1(D)->a0; | |
61 | ... | |
62 | ||
63 | # b_18 = PHI <D.2080_5(6), &c(7)> | |
64 | ... | |
65 | ||
66 | # b_21 = PHI <b_15(8), b_18(9)> | |
67 | D.2076_8 = *b_21; | |
68 | ||
69 | where the addresses loaded are defined by PHIs itself. | |
70 | The above happens for | |
71 | ||
72 | std::max(std::min(a0, c), std::min(std::max(a1, c), b)) | |
73 | ||
74 | where this pass transforms it to a form later PHI optimization | |
75 | recognizes and transforms it to the simple | |
76 | ||
77 | D.2109_10 = this_1(D)->a1; | |
78 | D.2110_11 = c; | |
79 | D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>; | |
80 | D.2115_14 = b; | |
81 | D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>; | |
82 | D.2119_16 = this_1(D)->a0; | |
83 | D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>; | |
84 | D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>; | |
85 | ||
86 | The pass does a dominator walk processing loads using a basic-block | |
87 | local analysis and stores the result for use by transformations on | |
88 | dominated basic-blocks. */ | |
89 | ||
90 | ||
91 | /* Structure to keep track of the value of a dereferenced PHI result | |
7f8fdb9f | 92 | and the virtual operand used for that dereference. */ |
67514449 RG |
93 | |
struct phiprop_d
{
  /* The value (SSA name or unshared aggregate lhs) recorded for a
     dereference of a PHI result; NULL_TREE if none recorded yet.  */
  tree value;
  /* The virtual operand (VUSE) of the load that produced VALUE; used
     by phivn_valid_p to check the recording is still valid later.  */
  tree vuse;
};
99 | ||
100 | /* Verify if the value recorded for NAME in PHIVN is still valid at | |
101 | the start of basic block BB. */ | |
102 | ||
static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
  gimple *use_stmt;
  imm_use_iterator ui2;
  bool ok = true;

  /* The def stmts of the virtual uses need to be dominated by bb.  */
  gcc_assert (vuse != NULL_TREE);

  /* Walk all immediate uses of the recorded virtual operand.  A store
     (stmt with a VDEF) or a virtual PHI whose block is not dominated
     by BB may clobber the memory the recorded value was loaded from,
     invalidating the recording.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
    {
      /* If BB does not dominate a VDEF, the value is invalid.  */
      if ((gimple_vdef (use_stmt) != NULL_TREE
	   || gimple_code (use_stmt) == GIMPLE_PHI)
	  && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
	{
	  ok = false;
	  /* Terminate the immediate-use walk cleanly; required when
	     leaving FOR_EACH_IMM_USE_STMT early.  */
	  BREAK_FROM_IMM_USE_STMT (ui2);
	}
    }

  return ok;
}
128 | ||
129 | /* Insert a new phi node for the dereference of PHI at basic_block | |
130 | BB with the virtual operands from USE_STMT. */ | |
131 | ||
static tree
phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
		    struct phiprop_d *phivn, size_t n)
{
  tree res;
  gphi *new_phi = NULL;
  edge_iterator ei;
  edge e;

  /* USE_STMT must be the dereference  lhs = MEM_REF (...)  found by
     the caller (propagate_with_phi).  */
  gcc_assert (is_gimple_assign (use_stmt)
	      && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = gimple_assign_lhs (use_stmt);
  /* For an aggregate (non-SSA) lhs no PHI is built; we only emit
     copies on the incoming edges below and NEW_PHI stays NULL.  */
  if (TREE_CODE (res) == SSA_NAME)
    new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0);
    }

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gassign *tmp;
      location_t locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      /* Follow single-rhs definitions until we hit either an SSA name
	 we already recorded a value for (version < n with a value) or
	 a non-SSA operand such as an ADDR_EXPR.  */
      while (TREE_CODE (old_arg) == SSA_NAME
	     && (SSA_NAME_VERSION (old_arg) >= n
		 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
	  old_arg = gimple_assign_rhs1 (def_stmt);
	  locus = gimple_location (def_stmt);
	}

      if (TREE_CODE (old_arg) == SSA_NAME)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
	      fprintf (dump_file, " reusing PHI result ");
	      print_generic_expr (dump_file,
				  phivn[SSA_NAME_VERSION (old_arg)].value);
	      fprintf (dump_file, "\n");
	    }
	  /* Reuse a formerly created dereference.  */
	  new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
	}
      else
	{
	  tree rhs = gimple_assign_rhs1 (use_stmt);
	  gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
	  /* For an SSA result build a fresh name; for an aggregate
	     result replicate the lhs (unshared) on each edge.  */
	  if (TREE_CODE (res) == SSA_NAME)
	    new_var = make_ssa_name (TREE_TYPE (rhs));
	  else
	    new_var = unshare_expr (res);
	  /* Use the original PHI argument if the address is not
	     invariant (it may contain SSA uses); otherwise unshare the
	     invariant address so trees are not shared across stmts.  */
	  if (!is_gimple_min_invariant (old_arg))
	    old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  else
	    old_arg = unshare_expr (old_arg);
	  /* Emit  new_var = MEM_REF (old_arg, offset)  reusing the
	     offset operand of the original dereference.  */
	  tmp = gimple_build_assign (new_var,
				     fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  old_arg,
						  TREE_OPERAND (rhs, 1)));
	  gimple_set_location (tmp, locus);

	  gsi_insert_on_edge (e, tmp);
	  update_stmt (tmp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
	      fprintf (dump_file, " inserting load ");
	      print_gimple_stmt (dump_file, tmp, 0);
	    }
	}

      if (new_phi)
	add_phi_arg (new_phi, new_var, e, locus);
    }

  if (new_phi)
    {
      update_stmt (new_phi);

      if (dump_file && (dump_flags & TDF_DETAILS))
	print_gimple_stmt (dump_file, new_phi, 0);
    }

  return res;
}
233 | ||
4a3255dd RB |
234 | /* Verify if *idx is available at *DATA. */ |
235 | ||
236 | static bool | |
237 | chk_uses (tree, tree *idx, void *data) | |
238 | { | |
239 | basic_block dom = (basic_block) data; | |
240 | if (TREE_CODE (*idx) == SSA_NAME) | |
241 | return (SSA_NAME_IS_DEFAULT_DEF (*idx) | |
242 | || ! dominated_by_p (CDI_DOMINATORS, | |
243 | gimple_bb (SSA_NAME_DEF_STMT (*idx)), dom)); | |
244 | return true; | |
245 | } | |
246 | ||
67514449 RG |
247 | /* Propagate between the phi node arguments of PHI in BB and phi result |
248 | users. For now this matches | |
249 | # p_2 = PHI <&x, &y> | |
250 | <Lx>:; | |
251 | p_3 = p_2; | |
252 | z_2 = *p_3; | |
253 | and converts it to | |
254 | # z_2 = PHI <x, y> | |
255 | <Lx>:; | |
256 | Returns true if a transformation was done and edge insertions | |
257 | need to be committed. Global data PHIVN and N is used to track | |
258 | past transformation results. We need to be especially careful here | |
259 | with aliasing issues as we are moving memory reads. */ | |
260 | ||
static bool
propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
		    size_t n)
{
  tree ptr = PHI_RESULT (phi);
  gimple *use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  bool changed;
  tree type = NULL_TREE;

  /* Only handle pointer PHIs whose pointed-to type is either a
     register type or at least has a non-BLK mode (so a load/copy of
     it can be represented).  */
  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr)))
	  && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr))) == BLKmode))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
	 created a value for or we reach a definition of the form
	 ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
	     && !SSA_NAME_IS_DEFAULT_DEF (arg)
	     && (SSA_NAME_VERSION (arg) >= n
		 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
	  if (!gimple_assign_single_p (def_stmt))
	    return false;
	  arg = gimple_assign_rhs1 (def_stmt);
	}
      /* Each argument must end in &var or in an SSA name with a valid
	 recorded value of a compatible type.  */
      if (TREE_CODE (arg) != ADDR_EXPR
	  && !(TREE_CODE (arg) == SSA_NAME
	       && SSA_NAME_VERSION (arg) < n
	       && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
	       && (!type
		   || types_compatible_p
			(type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
	       && phivn_valid_p (phivn, arg, bb)))
	return false;
      /* Latch the type of the first reused recorded value; later
	 arguments and loads must be compatible with it.  */
      if (!type
	  && TREE_CODE (arg) == SSA_NAME)
	type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
	 && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  changed = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple *def_stmt;
      tree vuse;

      /* Only replace loads in blocks that post-dominate the PHI node.  That
	 makes sure we don't end up speculating loads.  */
      if (!dominated_by_p (CDI_POST_DOMINATORS,
			   bb, gimple_bb (use_stmt)))
	continue;

      /* Check whether this is a load of *ptr.  */
      if (!(is_gimple_assign (use_stmt)
	    && gimple_assign_rhs_code (use_stmt) == MEM_REF
	    && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
	    && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
	    && (!type
		|| types_compatible_p
		     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
	    /* We cannot replace a load that may throw or is volatile.
	       For volatiles the transform can change the number of
	       executions if the load is inside a loop but the address
	       computations outside (PR91812).  We could relax this
	       if we guard against that appropriately.  For loads that can
	       throw we could relax things if the moved loads all are
	       known to not throw.  */
	    && !stmt_can_throw_internal (cfun, use_stmt)
	    && !gimple_has_volatile_ops (use_stmt)))
	continue;

      /* Check if we can move the loads.  The def stmt of the virtual use
	 needs to be in a different basic block dominating bb.  When the
	 def is an edge-inserted one we know it dominates us.  */
      vuse = gimple_vuse (use_stmt);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
	  && (gimple_bb (def_stmt) == bb
	      || (gimple_bb (def_stmt)
		  && !dominated_by_p (CDI_DOMINATORS,
				      bb, gimple_bb (def_stmt)))))
	goto next;

      /* Found a proper dereference with an aggregate copy.  Just
	 insert aggregate copies on the edges instead.  */
      if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt))))
	{
	  if (!gimple_vdef (use_stmt))
	    goto next;

	  /* As we replicate the lhs on each incoming edge all
	     used SSA names have to be available there.  */
	  if (! for_each_index (gimple_assign_lhs_ptr (use_stmt),
				chk_uses,
				get_immediate_dominator (CDI_DOMINATORS,
							 gimple_bb (phi))))
	    goto next;

	  gimple *vuse_stmt;
	  imm_use_iterator vui;
	  use_operand_p vuse_p;
	  /* In order to move the aggregate copies earlier, make sure
	     there are no statements that could read from memory
	     aliasing the lhs in between the start of bb and use_stmt.
	     As we require use_stmt to have a VDEF above, loads after
	     use_stmt will use a different virtual SSA_NAME.  */
	  FOR_EACH_IMM_USE_FAST (vuse_p, vui, vuse)
	    {
	      vuse_stmt = USE_STMT (vuse_p);
	      if (vuse_stmt == use_stmt)
		continue;
	      if (!dominated_by_p (CDI_DOMINATORS,
				   gimple_bb (vuse_stmt), bb))
		continue;
	      if (ref_maybe_used_by_stmt_p (vuse_stmt,
					    gimple_assign_lhs (use_stmt)))
		goto next;
	    }

	  phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

	  /* Remove old stmt.  The phi is taken care of by DCE.  */
	  gsi = gsi_for_stmt (use_stmt);
	  /* Unlinking the VDEF here is fine as we are sure that we process
	     stmts in execution order due to aggregate copies having VDEFs
	     and we emit loads on the edges in the very same order.
	     We get multiple copies (or intermediate register loads) handled
	     only by walking PHIs or immediate uses in a lucky order though,
	     so we could signal the caller to re-start iterating over PHIs
	     when we come here which would make it quadratic in the number
	     of PHIs.  */
	  unlink_stmt_vdef (use_stmt);
	  gsi_remove (&gsi, true);

	  changed = true;
	}

      /* Found a proper dereference.  Insert a phi node if this
	 is the first load transformation.  */
      else if (!phi_inserted)
	{
	  res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
	  type = TREE_TYPE (res);

	  /* Remember the value we created for *ptr.  */
	  phivn[SSA_NAME_VERSION (ptr)].value = res;
	  phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

	  /* Remove old stmt.  The phi is taken care of by DCE, if we
	     want to delete it here we also have to delete all intermediate
	     copies.  */
	  gsi = gsi_for_stmt (use_stmt);
	  gsi_remove (&gsi, true);

	  phi_inserted = true;
	  changed = true;
	}
      else
	{
	  /* Further replacements are easy, just make a copy out of the
	     load.  */
	  gimple_assign_set_rhs1 (use_stmt, res);
	  update_stmt (use_stmt);
	  changed = true;
	}

next:;
      /* Continue searching for a proper dereference.  */
    }

  return changed;
}
453 | ||
67514449 RG |
454 | /* Main entry for phiprop pass. */ |
455 | ||
be55bfe6 TS |
456 | namespace { |
457 | ||
/* Pass metadata for the phiprop GIMPLE pass.  */

const pass_data pass_data_phiprop =
{
  GIMPLE_PASS, /* type */
  "phiprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHIPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
470 | ||
class pass_phiprop : public gimple_opt_pass
{
public:
  pass_phiprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phiprop, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass runs only when flag_tree_phiprop is set.  */
  virtual bool gate (function *) { return flag_tree_phiprop; }
  virtual unsigned int execute (function *);

}; // class pass_phiprop
483 | ||
unsigned int
pass_phiprop::execute (function *fun)
{
  vec<basic_block> bbs;
  struct phiprop_d *phivn;
  bool did_something = false;
  basic_block bb;
  gphi_iterator gsi;
  unsigned i;
  size_t n;

  /* Both dominators and post-dominators are needed: dominance for
     validity of recorded values, post-dominance to avoid speculating
     loads (see propagate_with_phi).  */
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Size the value table by the current number of SSA names; names
     created later (version >= n) are deliberately not tracked.  */
  n = num_ssa_names;
  phivn = XCNEWVEC (struct phiprop_d, n);

  /* Walk the dominator tree in preorder.  */
  bbs = get_all_dominated_blocks (CDI_DOMINATORS,
				  single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* Since we're going to move dereferences across predecessor
	 edges avoid blocks with abnormal predecessors.  */
      if (bb_has_abnormal_pred (bb))
	continue;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	did_something |= propagate_with_phi (bb, gsi.phi (), phivn, n);
    }

  /* Materialize the loads queued on edges by phiprop_insert_phi.  */
  if (did_something)
    gsi_commit_edge_inserts ();

  bbs.release ();
  free (phivn);

  free_dominance_info (CDI_POST_DOMINATORS);

  return 0;
}
524 | ||
27a4cd48 DM |
525 | } // anon namespace |
526 | ||
/* Factory function used by the pass manager to create the pass.  */

gimple_opt_pass *
make_pass_phiprop (gcc::context *ctxt)
{
  return new pass_phiprop (ctxt);
}