/* Backward propagation of indirect loads through PHIs.
   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"

/* This pass propagates indirect loads through the PHI node for its
   address to make the load source possibly non-addressable and to
   allow for PHI optimization to trigger.

   For example the pass changes

     # addr_1 = PHI <&a, &b>
     tmp_1 = *addr_1;

   to

     # tmp_1 = PHI <a, b>

   but also handles more complex scenarios like

     D.2077_2 = &this_1(D)->a1;
     ...

     # b_12 = PHI <&c(2), D.2077_2(3)>
     D.2114_13 = *b_12;
     ...

     # b_15 = PHI <b_12(4), &b(5)>
     D.2080_5 = &this_1(D)->a0;
     ...

     # b_18 = PHI <D.2080_5(6), &c(7)>
     ...

     # b_21 = PHI <b_15(8), b_18(9)>
     D.2076_8 = *b_21;

   where the addresses loaded are themselves defined by PHIs.
   The above happens for

     std::max(std::min(a0, c), std::min(std::max(a1, c), b))

   where this pass transforms it to a form later PHI optimization
   recognizes and transforms it to the simple

     D.2109_10 = this_1(D)->a1;
     D.2110_11 = c;
     D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
     D.2115_14 = b;
     D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
     D.2119_16 = this_1(D)->a0;
     D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
     D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;

   The pass does a dominator walk processing loads using a basic-block
   local analysis and stores the result for use by transformations on
   dominated basic-blocks.  */

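/* For illustration only (a hedged sketch, not part of GCC itself): C source
   roughly of the following shape is the kind of input that produces the
   simple pattern described above, once the if/else join introduces a PHI
   for the address p.

     int
     f (int a, int b, int cond)
     {
       int *p;
       if (cond)
         p = &a;
       else
         p = &b;
       return *p;
     }

   Here the join block contains  # p_1 = PHI <&a, &b>  and the return loads
   *p_1; after the transformation the load is fed by # tmp_1 = PHI <a, b>
   (with loads of a and b inserted on the incoming edges), so a and b may
   become non-addressable and later SSA optimizations can treat them as
   registers.  */
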
/* Structure to keep track of the value of a dereferenced PHI result
   and the virtual operand used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vuse;
};
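
/* Note (descriptive, based on the use below): tree_ssa_phiprop allocates
   one such entry per SSA name version that exists when the pass starts and
   indexes the array by SSA_NAME_VERSION; names created later have versions
   greater than or equal to that initial count N and are treated as having
   no recorded value.  */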

/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
  gimple use_stmt;
  imm_use_iterator ui2;
  bool ok = true;

  /* The def stmts of the virtual uses need to be dominated by bb.  */
  gcc_assert (vuse != NULL_TREE);

  FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
    {
      /* If BB does not dominate a VDEF, the value is invalid.  */
      if ((gimple_vdef (use_stmt) != NULL_TREE
           || gimple_code (use_stmt) == GIMPLE_PHI)
          && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
        {
          ok = false;
          BREAK_FROM_IMM_USE_STMT (ui2);
        }
    }

  return ok;
}

/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

static tree
phiprop_insert_phi (basic_block bb, gimple phi, gimple use_stmt,
                    struct phiprop_d *phivn, size_t n)
{
  tree res;
  gimple new_phi;
  edge_iterator ei;
  edge e;

  gcc_assert (is_gimple_assign (use_stmt)
              && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = gimple_assign_lhs (use_stmt);
  new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0, 0);
    }

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gimple tmp;
      source_location locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = gimple_assign_rhs1 (def_stmt);
          locus = gimple_location (def_stmt);
        }

      if (TREE_CODE (old_arg) == SSA_NAME)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " for edge defining ");
              print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e), 0);
              fprintf (dump_file, " reusing PHI result ");
              print_generic_expr (dump_file,
                                  phivn[SSA_NAME_VERSION (old_arg)].value, 0);
              fprintf (dump_file, "\n");
            }
          /* Reuse a formerly created dereference.  */
          new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
        }
      else
        {
          tree rhs = gimple_assign_rhs1 (use_stmt);
          gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
          new_var = make_ssa_name (TREE_TYPE (rhs), NULL);
          if (!is_gimple_min_invariant (old_arg))
            old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
          else
            old_arg = unshare_expr (old_arg);
          tmp = gimple_build_assign (new_var,
                                     fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  old_arg,
                                                  TREE_OPERAND (rhs, 1)));
          gimple_set_location (tmp, locus);

          gsi_insert_on_edge (e, tmp);
          update_stmt (tmp);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " for edge defining ");
              print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e), 0);
              fprintf (dump_file, " inserting load ");
              print_gimple_stmt (dump_file, tmp, 0, 0);
            }
        }

      add_phi_arg (new_phi, new_var, e, locus);
    }

  update_stmt (new_phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_gimple_stmt (dump_file, new_phi, 0, 0);

  return res;
}
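
/* Hedged illustration of the insertion above (the SSA names are made up):
   given

     # b_12 = PHI <&c(2), a_9(3)>
     D.2114_13 = *b_12;

   a new PHI for D.2114_13 is created in BB.  For the &c(2) argument a load
   based on the unshared &c is queued on edge 2 via gsi_insert_on_edge and
   the loaded SSA name becomes the PHI argument; for a_9(3), if PHIVN already
   records a value for *a_9 from an earlier transformation, that value is
   reused directly.  The queued loads only appear in the IL once
   tree_ssa_phiprop calls gsi_commit_edge_inserts.  */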

/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
        p_3 = p_2;
        z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N are used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, gimple phi, struct phiprop_d *phivn,
                    size_t n)
{
  tree ptr = PHI_RESULT (phi);
  gimple use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  tree type = NULL_TREE;

  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (arg);
          if (!gimple_assign_single_p (def_stmt))
            return false;
          arg = gimple_assign_rhs1 (def_stmt);
        }
      if (TREE_CODE (arg) != ADDR_EXPR
          && !(TREE_CODE (arg) == SSA_NAME
               && SSA_NAME_VERSION (arg) < n
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && (!type
                   || types_compatible_p
                        (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
               && phivn_valid_p (phivn, arg, bb)))
        return false;
      if (!type
          && TREE_CODE (arg) == SSA_NAME)
        type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple def_stmt;
      tree vuse;

      /* Check whether this is a load of *ptr.  */
      if (!(is_gimple_assign (use_stmt)
            && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
            && gimple_assign_rhs_code (use_stmt) == MEM_REF
            && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
            && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
            && (!type
                || types_compatible_p
                     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
            /* We cannot replace a load that may throw or is volatile.  */
            && !stmt_can_throw_internal (use_stmt)))
        continue;

      /* Check if we can move the loads.  The def stmt of the virtual use
         needs to be in a different basic block dominating bb.  */
      vuse = gimple_vuse (use_stmt);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
          && (gimple_bb (def_stmt) == bb
              || !dominated_by_p (CDI_DOMINATORS,
                                  bb, gimple_bb (def_stmt))))
        goto next;

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */
      if (!phi_inserted)
        {
          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
          type = TREE_TYPE (res);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

          /* Remove old stmt.  The phi is taken care of by DCE; if we
             want to delete it here we also have to delete all intermediate
             copies.  */
          gsi = gsi_for_stmt (use_stmt);
          gsi_remove (&gsi, true);

          phi_inserted = true;
        }
      else
        {
          /* Further replacements are easy, just make a copy out of the
             load.  */
          gimple_assign_set_rhs1 (use_stmt, res);
          update_stmt (use_stmt);
        }

next:;
      /* Continue searching for a proper dereference.  */
    }

  return phi_inserted;
}
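
/* Hedged summary of the checks above: a load of *ptr is only considered
   when its virtual use (VUSE) is either the default definition or is
   defined in a block that strictly dominates BB; loads whose VUSE is
   defined in BB itself or in a non-dominating block are skipped via the
   "goto next" path, since moving such a read up to the PHI could cross a
   clobbering store.  */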

/* Main entry for phiprop pass.  */

static unsigned int
tree_ssa_phiprop (void)
{
  vec<basic_block> bbs;
  struct phiprop_d *phivn;
  bool did_something = false;
  basic_block bb;
  gimple_stmt_iterator gsi;
  unsigned i;
  size_t n;

  calculate_dominance_info (CDI_DOMINATORS);

  n = num_ssa_names;
  phivn = XCNEWVEC (struct phiprop_d, n);

  /* Walk the dominator tree in preorder.  */
  bbs = get_all_dominated_blocks (CDI_DOMINATORS,
                                  single_succ (ENTRY_BLOCK_PTR));
  FOR_EACH_VEC_ELT (bbs, i, bb)
    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      did_something |= propagate_with_phi (bb, gsi_stmt (gsi), phivn, n);

  if (did_something)
    gsi_commit_edge_inserts ();

  bbs.release ();
  free (phivn);

  return 0;
}

static bool
gate_phiprop (void)
{
  return flag_tree_phiprop;
}
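
/* Note: flag_tree_phiprop corresponds to the -ftree-phiprop command-line
   option; the GCC documentation describes this pass as enabled by default
   at -O1 and higher.  */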

namespace {

const pass_data pass_data_phiprop =
{
  GIMPLE_PASS, /* type */
  "phiprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_PHIPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_phiprop : public gimple_opt_pass
{
public:
  pass_phiprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phiprop, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_phiprop (); }
  unsigned int execute () { return tree_ssa_phiprop (); }

}; // class pass_phiprop

} // anon namespace

gimple_opt_pass *
make_pass_phiprop (gcc::context *ctxt)
{
  return new pass_phiprop (ctxt);
}