/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "alloc-pool.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"

class evrp_folder : public substitute_and_fold_engine
{
 public:
  tree get_value (tree) FINAL OVERRIDE;
  evrp_folder (class vr_values *vr_values_) : vr_values (vr_values_) { }
  bool simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
    { return vr_values->simplify_stmt_using_ranges (gsi); }
  class vr_values *vr_values;

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_folder);
};

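/* Return the constant, if any, to which OP is known to be equal
   according to the recorded value ranges, otherwise return NULL_TREE.
   This is the hook the substitute-and-fold engine queries to decide
   which uses to replace.  */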
tree
evrp_folder::get_value (tree op)
{
  return vr_values->op_with_constant_singleton_value_range (op);
}

/* evrp_dom_walker visits the basic blocks in dominance order and sets
   the Value Ranges (VR) for SSA_NAMEs in the scope.  These VRs are then
   used to discover more VRs.  */
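/* For example (a hypothetical GIMPLE fragment, assuming 32-bit int
   operands), given

     x_2 = x_1 & 255;
     if (x_2 > 10)
       y_3 = x_2 + 1;

   the walker records x_2 : [0, 255] from the bitwise AND, refines it to
   [11, 255] on entry to the true arm, and derives y_3 : [12, 256], which
   statements and conditions dominated by that arm can then use.  */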

class evrp_dom_walker : public dom_walker
{
 public:
  evrp_dom_walker ()
    : dom_walker (CDI_DOMINATORS),
      evrp_range_analyzer (true),
      evrp_folder (evrp_range_analyzer.get_vr_values ())
    {
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
  ~evrp_dom_walker ()
    {
      BITMAP_FREE (need_eh_cleanup);
    }
  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);
  void cleanup (void);

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_dom_walker);
  bitmap need_eh_cleanup;
  auto_vec<gimple *> stmts_to_fixup;
  auto_vec<gimple *> stmts_to_remove;

  class evrp_range_analyzer evrp_range_analyzer;
  class evrp_folder evrp_folder;
};

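/* Visit block BB in dominance order: record the value ranges implied by
   its PHIs and statements, use those ranges to fold and simplify the
   statements in place, queue statements whose results were fully
   propagated for later removal, and propagate known-constant arguments
   into successor PHIs.  Return the single outgoing edge known to be taken
   if BB ends in a condition the recorded ranges resolve, or NULL.  */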
edge
evrp_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB%d\n", bb->index);

  evrp_range_analyzer.enter (bb);

  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
        continue;

      value_range *vr = evrp_range_analyzer.get_value_range (lhs);
      /* Mark PHIs whose lhs we fully propagate for removal.  */
      tree val = value_range_constant_singleton (vr);
      if (val && may_propagate_copy (lhs, val))
        {
          stmts_to_remove.safe_push (phi);
          continue;
        }
    }

  edge taken_edge = NULL;

  /* Visit all other stmts and discover any new VRs possible.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree output = NULL_TREE;
      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
                           && gimple_call_noreturn_p (stmt));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Visiting stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      evrp_range_analyzer.record_ranges_from_stmt (stmt, false);

      if (gcond *cond = dyn_cast <gcond *> (stmt))
        {
          evrp_range_analyzer.vrp_visit_cond_stmt (cond, &taken_edge);
          if (taken_edge)
            {
              if (taken_edge->flags & EDGE_TRUE_VALUE)
                gimple_cond_make_true (cond);
              else if (taken_edge->flags & EDGE_FALSE_VALUE)
                gimple_cond_make_false (cond);
              else
                gcc_unreachable ();
              update_stmt (stmt);
            }
        }
      else if (stmt_interesting_for_vrp (stmt))
        {
          output = get_output_for_vrp (stmt);
          if (output)
            {
              tree val;
              value_range *vr = evrp_range_analyzer.get_value_range (output);

              /* Mark stmts whose output we fully propagate for removal.  */
              if ((val = value_range_constant_singleton (vr))
                  && may_propagate_copy (output, val)
                  && !stmt_could_throw_p (cfun, stmt)
                  && !gimple_has_side_effects (stmt))
                {
                  stmts_to_remove.safe_push (stmt);
                  continue;
                }
            }
        }

      /* Try folding stmts with the VR discovered.  */
      bool did_replace = evrp_folder.replace_uses_in (stmt);
      if (fold_stmt (&gsi, follow_single_use_edges)
          || did_replace)
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }
      if (evrp_folder.simplify_stmt_using_ranges (&gsi))
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }

      if (did_replace)
        {
          /* If we cleaned up EH information from the statement,
             remove EH edges.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* If we turned a call that was not noreturn into a noreturn one,
             schedule it for fixup.  */
          if (!was_noreturn
              && is_gimple_call (stmt)
              && gimple_call_noreturn_p (stmt))
            stmts_to_fixup.safe_push (stmt);

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (rhs);
            }
        }
    }

  /* Visit BB successor PHI nodes and replace PHI args.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      for (gphi_iterator gpi = gsi_start_phis (e->dest);
           !gsi_end_p (gpi); gsi_next (&gpi))
        {
          gphi *phi = gpi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          value_range *vr = evrp_range_analyzer.get_value_range (arg);
          tree val = value_range_constant_singleton (vr);
          if (val && may_propagate_copy (arg, val))
            propagate_value (use_p, val);
        }
    }

  return taken_edge;
}

void
evrp_dom_walker::after_dom_children (basic_block bb)
{
  evrp_range_analyzer.leave (bb);
}

/* Perform any cleanups after the main phase of EVRP has completed.  */

void
evrp_dom_walker::cleanup (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\nValue ranges after Early VRP:\n\n");
      evrp_range_analyzer.dump_all_value_ranges (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (! stmts_to_remove.is_empty ())
    {
      gimple *stmt = stmts_to_remove.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
        }
    }

  if (!bitmap_empty_p (need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now-noreturn call first.  */
  while (!stmts_to_fixup.is_empty ())
    {
      gimple *stmt = stmts_to_fixup.pop ();
      fixup_noreturn_call (stmt);
    }

  evrp_folder.vr_values->cleanup_edges_and_switches ();
}

/* Main entry point for the early vrp pass, which is a simplified,
   non-iterative version of vrp in which basic blocks are visited in
   dominance order.  Value ranges discovered by early vrp are also used
   by ipa-vrp.  */
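/* Unlike the full tree-vrp pass, this walk does not rewrite the IL with
   ASSERT_EXPRs; ranges are instead recorded on the side by
   evrp_range_analyzer, whose state is pushed when the dominator walk
   enters a block and popped again when it leaves it.  */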

static unsigned int
execute_early_vrp ()
{
  /* Ideally this setup code would move into the ctor for the dominator
     walk.  However, this setup can change the number of blocks, which
     invalidates the internal arrays that are set up by the dominator
     walker.  */
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Walk stmts in dominance order and propagate VRP.  */
  evrp_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  walker.cleanup ();

  scev_finalize ();
  loop_optimizer_finalize ();
  return 0;
}

namespace {

const pass_data pass_data_early_vrp =
{
  GIMPLE_PASS, /* type */
  "evrp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EARLY_VRP, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa | TODO_verify_all ), /* todo_flags_finish */
};

class pass_early_vrp : public gimple_opt_pass
{
 public:
  pass_early_vrp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_vrp, ctxt)
    {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_early_vrp (m_ctxt); }
  virtual bool gate (function *)
    {
      return flag_tree_vrp != 0;
    }
  virtual unsigned int execute (function *)
    { return execute_early_vrp (); }

}; // class pass_early_vrp
} // anon namespace

gimple_opt_pass *
make_pass_early_vrp (gcc::context *ctxt)
{
  return new pass_early_vrp (ctxt);
}