/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "alloc-pool.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"

class evrp_folder : public substitute_and_fold_engine
{
 public:
  tree get_value (tree) FINAL OVERRIDE;
  evrp_folder (class vr_values *vr_values_) : vr_values (vr_values_) { }
  bool simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
    { return vr_values->simplify_stmt_using_ranges (gsi); }
  class vr_values *vr_values;

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_folder);
};

tree
evrp_folder::get_value (tree op)
{
  return vr_values->op_with_constant_singleton_value_range (op);
}

64/* evrp_dom_walker visits the basic blocks in the dominance order and set
65 the Value Ranges (VR) for SSA_NAMEs in the scope. Use this VR to
66 discover more VRs. */
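/* A hedged illustration of the overall effect (hypothetical GIMPLE,
   not taken from this file or any testcase): for

     if (x_1 > 10)
       _2 = x_1 > 5;   (in the block dominated by the true edge)

   entering the dominated block records x_1 in [11, +INF], so the
   range of _2 is the singleton [1, 1] and the comparison folds to 1.  */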

class evrp_dom_walker : public dom_walker
{
public:
  evrp_dom_walker ()
    : dom_walker (CDI_DOMINATORS),
      evrp_range_analyzer (true),
      evrp_folder (evrp_range_analyzer.get_vr_values ())
    {
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
  ~evrp_dom_walker ()
    {
      BITMAP_FREE (need_eh_cleanup);
    }
  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);
  void cleanup (void);

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_dom_walker);
  bitmap need_eh_cleanup;
  auto_vec<gimple *> stmts_to_fixup;
  auto_vec<gimple *> stmts_to_remove;

  class evrp_range_analyzer evrp_range_analyzer;
  class evrp_folder evrp_folder;
};

edge
evrp_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB%d\n", bb->index);

  evrp_range_analyzer.enter (bb);

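  /* A minimal sketch of the PHI handling below (hypothetical GIMPLE,
     shown only for illustration): for a PHI such as

       x_3 = PHI <5(2), 5(4)>

     the analyzer's range for x_3 is the singleton [5, 5], so uses of
     x_3 can be replaced by 5 and the PHI is queued for removal.  */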
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
	continue;

      value_range *vr = evrp_range_analyzer.get_value_range (lhs);
      /* Mark PHIs whose lhs we fully propagate for removal.  */
      tree val;
      if (vr->singleton_p (&val) && may_propagate_copy (lhs, val))
	{
	  stmts_to_remove.safe_push (phi);
	  continue;
	}
    }

  edge taken_edge = NULL;

  /* Visit all other stmts and discover any new VRs possible.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree output = NULL_TREE;
      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Visiting stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      evrp_range_analyzer.record_ranges_from_stmt (stmt, false);

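      /* A small example of the conditional handling below (hypothetical
	 GIMPLE, for illustration only): if i_2 is known to lie in [1, 9]
	 and the statement is

	   if (i_2 != 0)

	 then vrp_visit_cond_stmt reports the true edge as always taken
	 and the condition is rewritten via gimple_cond_make_true.  */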
      if (gcond *cond = dyn_cast <gcond *> (stmt))
	{
	  evrp_range_analyzer.vrp_visit_cond_stmt (cond, &taken_edge);
	  if (taken_edge)
	    {
	      if (taken_edge->flags & EDGE_TRUE_VALUE)
		gimple_cond_make_true (cond);
	      else if (taken_edge->flags & EDGE_FALSE_VALUE)
		gimple_cond_make_false (cond);
	      else
		gcc_unreachable ();
	      update_stmt (stmt);
	    }
	}
      else if (stmt_interesting_for_vrp (stmt))
	{
	  output = get_output_for_vrp (stmt);
	  if (output)
	    {
	      tree val;
	      value_range *vr = evrp_range_analyzer.get_value_range (output);

	      /* Mark stmts whose output we fully propagate for removal.  */
	      if (vr->singleton_p (&val)
		  && may_propagate_copy (output, val)
		  && !stmt_could_throw_p (cfun, stmt)
		  && !gimple_has_side_effects (stmt))
		{
		  stmts_to_remove.safe_push (stmt);
		  continue;
		}
	    }
	}

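      /* A hedged illustration of the folding and simplification below
	 (hypothetical GIMPLE, not from a testcase): with x_4 known to be
	 in [0, 3], a statement like

	   _7 = x_4 % 16;

	 can be simplified by simplify_stmt_using_ranges into a plain
	 copy of x_4, since the dividend never reaches the divisor;
	 replace_uses_in likewise substitutes operands whose ranges are
	 constant singletons.  */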
176 /* Try folding stmts with the VR discovered. */
9c015ccf 177 bool did_replace = evrp_folder.replace_uses_in (stmt);
5ebf19e5 178 gimple_stmt_iterator prev_gsi = gsi;
179 gsi_prev (&prev_gsi);
9c015ccf 180 if (fold_stmt (&gsi, follow_single_use_edges)
181 || did_replace)
182 {
183 stmt = gsi_stmt (gsi);
184 update_stmt (stmt);
185 did_replace = true;
186 }
d19574de 187 if (evrp_folder.simplify_stmt_using_ranges (&gsi))
188 {
189 stmt = gsi_stmt (gsi);
190 update_stmt (stmt);
191 did_replace = true;
192 }
9c015ccf 193
194 if (did_replace)
195 {
5ebf19e5 196 /* If we wound up generating new stmts during folding
197 drop all their defs to VARYING. We can't easily
198 process them because we've already instantiated
199 ranges on uses on STMT that only hold after it. */
200 if (gsi_end_p (prev_gsi))
201 prev_gsi = gsi_start_bb (bb);
202 else
203 gsi_next (&prev_gsi);
204 while (gsi_stmt (prev_gsi) != gsi_stmt (gsi))
205 {
206 evrp_range_analyzer.get_vr_values ()
207 ->set_defs_to_varying (gsi_stmt (prev_gsi));
208 gsi_next (&prev_gsi);
209 }
210
9c015ccf 211 /* If we cleaned up EH information from the statement,
212 remove EH edges. */
213 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
214 bitmap_set_bit (need_eh_cleanup, bb->index);
215
	  /* If we turned a non-noreturn call into a noreturn one,
	     schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }
	}
    }

  /* Visit BB successor PHI nodes and replace PHI args.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      for (gphi_iterator gpi = gsi_start_phis (e->dest);
	   !gsi_end_p (gpi); gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  value_range *vr = evrp_range_analyzer.get_value_range (arg);
	  tree val;
	  if (vr->singleton_p (&val) && may_propagate_copy (arg, val))
	    propagate_value (use_p, val);
	}
    }

  return taken_edge;
}

void
evrp_dom_walker::after_dom_children (basic_block bb)
{
  evrp_range_analyzer.leave (bb);
}

/* Perform any cleanups after the main phase of EVRP has completed.  */

void
evrp_dom_walker::cleanup (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\nValue ranges after Early VRP:\n\n");
      evrp_range_analyzer.dump_all_value_ranges (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (! stmts_to_remove.is_empty ())
    {
      gimple *stmt = stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!stmts_to_fixup.is_empty ())
    {
      gimple *stmt = stmts_to_fixup.pop ();
      fixup_noreturn_call (stmt);
    }

  evrp_folder.vr_values->cleanup_edges_and_switches ();
}

/* Main entry point for the early vrp pass, which is a simplified,
   non-iterative version of vrp where basic blocks are visited in
   dominance order.  Value ranges discovered in early vrp will also be
   used by ipa-vrp.  */

static unsigned int
execute_early_vrp ()
{
  /* Ideally this setup code would move into the ctor for the dominator
     walk.  However, this setup can change the number of blocks which
     invalidates the internal arrays that are set up by the dominator
     walker.  */
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Walk stmts in dominance order and propagate VRP.  */
  evrp_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  walker.cleanup ();

  scev_finalize ();
  loop_optimizer_finalize ();
  return 0;
}

namespace {

const pass_data pass_data_early_vrp =
{
  GIMPLE_PASS, /* type */
  "evrp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EARLY_VRP, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa | TODO_verify_all ),
};

class pass_early_vrp : public gimple_opt_pass
{
public:
  pass_early_vrp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_vrp, ctxt)
    {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_early_vrp (m_ctxt); }
  virtual bool gate (function *)
    {
      return flag_tree_vrp != 0;
    }
  virtual unsigned int execute (function *)
    { return execute_early_vrp (); }

}; // class pass_early_vrp
} // anon namespace

gimple_opt_pass *
make_pass_early_vrp (gcc::context *ctxt)
{
  return new pass_early_vrp (ctxt);
}
