/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "tree.h"
25#include "gimple.h"
26#include "tree-pass.h"
27#include "ssa.h"
28#include "gimple-pretty-print.h"
29#include "cfganal.h"
30#include "gimple-fold.h"
31#include "tree-eh.h"
32#include "gimple-iterator.h"
33#include "tree-cfg.h"
34#include "tree-ssa-loop-manip.h"
35#include "tree-ssa-loop.h"
36#include "cfgloop.h"
37#include "tree-scalar-evolution.h"
38#include "tree-ssa-propagate.h"
39#include "alloc-pool.h"
40#include "domwalk.h"
41#include "tree-cfgcleanup.h"
42#include "vr-values.h"
74ba745b 43#include "gimple-ssa-evrp-analyze.h"
16207ddd
JL
44
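/* evrp_folder plugs the range information computed by the EVRP analyzer
   into the generic substitute_and_fold_engine: get_value () reports SSA
   names that are known to evaluate to a single constant, and statement
   simplification is delegated to the underlying vr_values instance.  */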
class evrp_folder : public substitute_and_fold_engine
{
 public:
  tree get_value (tree) FINAL OVERRIDE;
  evrp_folder (class vr_values *vr_values_) : vr_values (vr_values_) { }
  bool simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
    { return vr_values->simplify_stmt_using_ranges (gsi); }
  class vr_values *vr_values;

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_folder);
};

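/* Return the constant OP is known to evaluate to, if its value range is
   a singleton, or NULL_TREE otherwise.  */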
tree
evrp_folder::get_value (tree op)
{
  return vr_values->op_with_constant_singleton_value_range (op);
}

/* evrp_dom_walker visits the basic blocks in dominance order and sets
   the Value Ranges (VR) for SSA_NAMEs in the scope.  These VRs are then
   used to discover more VRs.  */

class evrp_dom_walker : public dom_walker
{
public:
  evrp_dom_walker ()
    : dom_walker (CDI_DOMINATORS),
      evrp_range_analyzer (true),
      evrp_folder (evrp_range_analyzer.get_vr_values ())
    {
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
  ~evrp_dom_walker ()
    {
      BITMAP_FREE (need_eh_cleanup);
    }
  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);
  void cleanup (void);

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_dom_walker);
  bitmap need_eh_cleanup;
  auto_vec<gimple *> stmts_to_fixup;
  auto_vec<gimple *> stmts_to_remove;

  class evrp_range_analyzer evrp_range_analyzer;
  class evrp_folder evrp_folder;
};

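/* Visit BB before its dominated children: record ranges for PHI results
   and statement outputs, fold and simplify statements using the ranges
   discovered so far, and queue fully propagated statements for removal.
   Return the outgoing edge taken when the terminating conditional is
   known to evaluate to a constant, or NULL otherwise.  */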
edge
evrp_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB%d\n", bb->index);

  evrp_range_analyzer.enter (bb);

  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
        continue;

      const value_range_equiv *vr = evrp_range_analyzer.get_value_range (lhs);
      /* Mark PHIs whose lhs we fully propagate for removal.  */
      tree val;
      if (vr->singleton_p (&val) && may_propagate_copy (lhs, val))
        {
          stmts_to_remove.safe_push (phi);
          continue;
        }
    }

  edge taken_edge = NULL;

  /* Visit all other stmts and discover any new VRs possible.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree output = NULL_TREE;
      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
                           && gimple_call_noreturn_p (stmt));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Visiting stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      evrp_range_analyzer.record_ranges_from_stmt (stmt, false);

      if (gcond *cond = dyn_cast <gcond *> (stmt))
        {
          evrp_range_analyzer.vrp_visit_cond_stmt (cond, &taken_edge);
          if (taken_edge)
            {
              if (taken_edge->flags & EDGE_TRUE_VALUE)
                gimple_cond_make_true (cond);
              else if (taken_edge->flags & EDGE_FALSE_VALUE)
                gimple_cond_make_false (cond);
              else
                gcc_unreachable ();
              update_stmt (stmt);
            }
        }
      else if (stmt_interesting_for_vrp (stmt))
        {
          output = get_output_for_vrp (stmt);
          if (output)
            {
              const value_range_equiv *vr
                = evrp_range_analyzer.get_value_range (output);

              /* Mark stmts whose output we fully propagate for removal.  */
              tree val;
              if (vr->singleton_p (&val)
                  && may_propagate_copy (output, val)
                  && !stmt_could_throw_p (cfun, stmt)
                  && !gimple_has_side_effects (stmt))
                {
                  stmts_to_remove.safe_push (stmt);
                  continue;
                }
            }
        }

      /* Try folding stmts with the VR discovered.  */
      bool did_replace = evrp_folder.replace_uses_in (stmt);
      gimple_stmt_iterator prev_gsi = gsi;
      gsi_prev (&prev_gsi);
      if (fold_stmt (&gsi, follow_single_use_edges)
          || did_replace)
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }
      if (evrp_folder.simplify_stmt_using_ranges (&gsi))
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }

      if (did_replace)
        {
          /* If we wound up generating new stmts during folding,
             drop all their defs to VARYING.  We can't easily
             process them because we've already instantiated
             ranges on uses of STMT that only hold after it.  */
          if (gsi_end_p (prev_gsi))
            prev_gsi = gsi_start_bb (bb);
          else
            gsi_next (&prev_gsi);
          while (gsi_stmt (prev_gsi) != gsi_stmt (gsi))
            {
              evrp_range_analyzer.get_vr_values ()
                ->set_defs_to_varying (gsi_stmt (prev_gsi));
              gsi_next (&prev_gsi);
            }

          /* If we cleaned up EH information from the statement,
             remove EH edges.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* If we turned a call that was not noreturn into a noreturn
             one, schedule it for fixup.  */
          if (!was_noreturn
              && is_gimple_call (stmt)
              && gimple_call_noreturn_p (stmt))
            stmts_to_fixup.safe_push (stmt);

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (rhs);
            }
        }
    }

  /* Visit BB successor PHI nodes and replace PHI args.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      for (gphi_iterator gpi = gsi_start_phis (e->dest);
           !gsi_end_p (gpi); gsi_next (&gpi))
        {
          gphi *phi = gpi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          const value_range_equiv
            *vr = evrp_range_analyzer.get_value_range (arg);
          tree val;
          if (vr->singleton_p (&val) && may_propagate_copy (arg, val))
            propagate_value (use_p, val);
        }
    }

  return taken_edge;
}

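/* Leave BB after all blocks it dominates have been visited, restoring
   the range state that was in effect when BB was entered.  */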
void
evrp_dom_walker::after_dom_children (basic_block bb)
{
  evrp_range_analyzer.leave (bb);
}

/* Perform any cleanups after the main phase of EVRP has completed.  */

void
evrp_dom_walker::cleanup (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\nValue ranges after Early VRP:\n\n");
      evrp_range_analyzer.dump_all_value_ranges (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (! stmts_to_remove.is_empty ())
    {
      gimple *stmt = stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
        }
    }

  if (!bitmap_empty_p (need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fix up by first visiting a dominating, now-noreturn call.  */
  while (!stmts_to_fixup.is_empty ())
    {
      gimple *stmt = stmts_to_fixup.pop ();
      fixup_noreturn_call (stmt);
    }

  evrp_folder.vr_values->cleanup_edges_and_switches ();
}

/* Main entry point for the early VRP pass, which is a simplified,
   non-iterative version of VRP where basic blocks are visited in
   dominance order.  Value ranges discovered in early VRP will also be
   used by ipa-vrp.  */

static unsigned int
execute_early_vrp ()
{
  /* Ideally this setup code would move into the ctor for the dominator
     walk.  However, this setup can change the number of blocks, which
     invalidates the internal arrays that are set up by the dominator
     walker.  */
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Walk stmts in dominance order and propagate VRP.  */
  evrp_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  walker.cleanup ();

  scev_finalize ();
  loop_optimizer_finalize ();
  return 0;
}

namespace {

const pass_data pass_data_early_vrp =
{
  GIMPLE_PASS, /* type */
  "evrp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EARLY_VRP, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa | TODO_verify_all ), /* todo_flags_finish */
};

class pass_early_vrp : public gimple_opt_pass
{
public:
  pass_early_vrp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_vrp, ctxt)
    {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_early_vrp (m_ctxt); }
  virtual bool gate (function *)
    {
      return flag_tree_vrp != 0;
    }
  virtual unsigned int execute (function *)
    { return execute_early_vrp (); }

}; // class pass_early_vrp
} // anon namespace

gimple_opt_pass *
make_pass_early_vrp (gcc::context *ctxt)
{
  return new pass_early_vrp (ctxt);
}