/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "alloc-pool.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"

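/* The analyzer is intended to be driven from a dominator walk over the
   current function: a client calls enter () when stepping into a basic
   block, record_ranges_from_stmt () for each statement it processes
   and leave () when it is done with the block, so that ranges recorded
   for that block are unwound again.  A minimal sketch of such a client
   (the walker class and member names below are illustrative only, not
   part of this file):

     class sketch_walker : public dom_walker
     {
     public:
       sketch_walker () : dom_walker (CDI_DOMINATORS), m_analyzer (true) {}
       virtual edge before_dom_children (basic_block bb)
	 {
	   m_analyzer.enter (bb);
	   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
		!gsi_end_p (gsi); gsi_next (&gsi))
	     m_analyzer.record_ranges_from_stmt (gsi_stmt (gsi), false);
	   return NULL;
	 }
       virtual void after_dom_children (basic_block bb)
	 {
	   m_analyzer.leave (bb);
	 }
       evrp_range_analyzer m_analyzer;
     };  */

/* Set up the analyzer: clear BB_VISITED on every block, mark all
   incoming edges executable and allocate the underlying vr_values
   lattice.  */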
evrp_range_analyzer::evrp_range_analyzer (bool update_global_ranges)
  : stack (10), m_update_global_ranges (update_global_ranges)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->preds)
        e->flags |= EDGE_EXECUTABLE;
    }
  vr_values = new class vr_values;
}

/* Push an unwinding marker onto the unwinding stack.  */

void
evrp_range_analyzer::push_marker ()
{
  stack.safe_push (std::make_pair (NULL_TREE, (value_range *)NULL));
}

/* Analyze ranges as we enter basic block BB.  */

void
evrp_range_analyzer::enter (basic_block bb)
{
  if (!optimize)
    return;
  push_marker ();
  record_ranges_from_incoming_edge (bb);
  record_ranges_from_phis (bb);
  bb->flags |= BB_VISITED;
}

/* Find new range for NAME such that (OP CODE LIMIT) is true.  */
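/* For instance, on the true edge of "if (x_1 > 10)" this would be
   called with NAME = OP = x_1, CODE = GT_EXPR and LIMIT = 10; assuming
   x_1 is a signed integer with no better recorded range than VARYING,
   the returned range would be [11, +INF].  */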
value_range *
evrp_range_analyzer::try_find_new_range (tree name,
                                         tree op, tree_code code, tree limit)
{
  value_range vr;
  const value_range *old_vr = get_value_range (name);

  /* Discover VR when condition is true.  */
  vr_values->extract_range_for_var_from_comparison_expr (name, code, op,
                                                          limit, &vr);
  /* If we found a usable VR, return a freshly allocated copy of it so
     the caller can install it for the SSA name and push the old VR
     onto the unwind stack.  */
  if (!vr.undefined_p () && !vr.varying_p ())
    {
      if (old_vr->kind () == vr.kind ()
          && vrp_operand_equal_p (old_vr->min (), vr.min ())
          && vrp_operand_equal_p (old_vr->max (), vr.max ()))
        return NULL;
      value_range *new_vr = vr_values->allocate_value_range ();
      new_vr->move (&vr);
      return new_vr;
    }
  return NULL;
}

/* For LHS record VR in the SSA info.  */
void
evrp_range_analyzer::set_ssa_range_info (tree lhs, value_range *vr)
{
  gcc_assert (m_update_global_ranges);

  /* Set the SSA with the value range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    {
      if (vr->constant_p ())
        set_range_info (lhs, vr->kind (),
                        wi::to_wide (vr->min ()),
                        wi::to_wide (vr->max ()));
    }
  else if (POINTER_TYPE_P (TREE_TYPE (lhs))
           && range_includes_zero_p (vr) == 0)
    set_ptr_nonnull (lhs);
}

/* Return true if all uses of NAME are dominated by STMT or feed STMT
   via a chain of single immediate uses.  */

static bool
all_uses_feed_or_dominated_by_stmt (tree name, gimple *stmt)
{
  use_operand_p use_p, use2_p;
  imm_use_iterator iter;
  basic_block stmt_bb = gimple_bb (stmt);

  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    {
      gimple *use_stmt = USE_STMT (use_p), *use_stmt2;
      if (use_stmt == stmt
          || is_gimple_debug (use_stmt)
          || (gimple_bb (use_stmt) != stmt_bb
              && dominated_by_p (CDI_DOMINATORS,
                                 gimple_bb (use_stmt), stmt_bb)))
        continue;
      while (use_stmt != stmt
             && is_gimple_assign (use_stmt)
             && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
             && single_imm_use (gimple_assign_lhs (use_stmt),
                                &use2_p, &use_stmt2))
        use_stmt = use_stmt2;
      if (use_stmt != stmt)
        return false;
    }
  return true;
}

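/* Record ranges that hold on entry to BB because of the controlling
   GIMPLE_COND in its single (non-loop) predecessor, if there is one
   and it tests an integral or pointer SSA name.  */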
void
evrp_range_analyzer::record_ranges_from_incoming_edge (basic_block bb)
{
  edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
  if (pred_e)
    {
      gimple *stmt = last_stmt (pred_e->src);
      tree op0 = NULL_TREE;

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND
          && (op0 = gimple_cond_lhs (stmt))
          && TREE_CODE (op0) == SSA_NAME
          && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
              || POINTER_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Visiting controlling predicate ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          /* Entering a new scope.  Try to see if we can find a VR
             here.  */
          tree op1 = gimple_cond_rhs (stmt);
          if (TREE_OVERFLOW_P (op1))
            op1 = drop_tree_overflow (op1);
          tree_code code = gimple_cond_code (stmt);

          auto_vec<assert_info, 8> asserts;
          register_edge_assert_for (op0, pred_e, code, op0, op1, asserts);
          if (TREE_CODE (op1) == SSA_NAME)
            register_edge_assert_for (op1, pred_e, code, op0, op1, asserts);

          auto_vec<std::pair<tree, value_range *>, 8> vrs;
          for (unsigned i = 0; i < asserts.length (); ++i)
            {
              value_range *vr = try_find_new_range (asserts[i].name,
                                                    asserts[i].expr,
                                                    asserts[i].comp_code,
                                                    asserts[i].val);
              if (vr)
                vrs.safe_push (std::make_pair (asserts[i].name, vr));
            }

          /* If pred_e is really a fallthru we can record value ranges
             in SSA names as well.  */
          bool is_fallthru = assert_unreachable_fallthru_edge_p (pred_e);

          /* Push updated ranges only after finding all of them to avoid
             ordering issues that can lead to worse ranges.  */
          for (unsigned i = 0; i < vrs.length (); ++i)
            {
              /* But make sure we do not weaken ranges like when
                 getting first [64, +INF] and then ~[0, 0] from
                 conditions like (s & 0x3cc0) == 0.  */
              const value_range *old_vr = get_value_range (vrs[i].first);
              value_range_base tem (old_vr->kind (), old_vr->min (),
                                    old_vr->max ());
              tem.intersect (vrs[i].second);
              if (tem.equal_p (*old_vr))
                continue;
              push_value_range (vrs[i].first, vrs[i].second);
              if (is_fallthru
                  && m_update_global_ranges
                  && all_uses_feed_or_dominated_by_stmt (vrs[i].first, stmt))
                {
                  set_ssa_range_info (vrs[i].first, vrs[i].second);
                  maybe_set_nonzero_bits (pred_e, vrs[i].first);
                }
            }
        }
    }
}

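/* Record ranges for the PHI results in BB.  PHI argument ranges are
   used when every executable predecessor has already been visited;
   otherwise the result is set to VARYING, possibly refined via SCEV
   for loop header PHIs.  */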
void
evrp_range_analyzer::record_ranges_from_phis (basic_block bb)
{
  /* Visit PHI stmts and discover any new VRs possible.  */
  bool has_unvisited_preds = false;
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EXECUTABLE
        && !(e->src->flags & BB_VISITED))
      {
        has_unvisited_preds = true;
        break;
      }

  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
        continue;

      value_range vr_result;
      bool interesting = stmt_interesting_for_vrp (phi);
      if (!has_unvisited_preds && interesting)
        vr_values->extract_range_from_phi_node (phi, &vr_result);
      else
        {
          vr_result.set_varying ();
          /* When we have an unvisited executable predecessor we can't
             use PHI arg ranges which may still be UNDEFINED but have
             to use VARYING for them.  But we can still resort to
             SCEV for loop header PHIs.  */
          class loop *l;
          if (scev_initialized_p ()
              && interesting
              && (l = loop_containing_stmt (phi))
              && l->header == gimple_bb (phi))
            vr_values->adjust_range_with_scev (&vr_result, l, phi, lhs);
        }
      vr_values->update_value_range (lhs, &vr_result);

      /* Set the SSA with the value range.  */
      if (m_update_global_ranges)
        set_ssa_range_info (lhs, &vr_result);
    }
}

/* Record ranges from STMT into our VR_VALUES class.  If TEMPORARY is
   true, then this is a temporary equivalence and should be recorded
   into the unwind table.  Otherwise record the equivalence into the
   global table.  */

void
evrp_range_analyzer::record_ranges_from_stmt (gimple *stmt, bool temporary)
{
  tree output = NULL_TREE;

  if (!optimize)
    return;

  if (dyn_cast <gcond *> (stmt))
    ;
  else if (stmt_interesting_for_vrp (stmt))
    {
      edge taken_edge;
      value_range vr;
      vr_values->extract_range_from_stmt (stmt, &taken_edge, &output, &vr);
      if (output)
        {
          /* Set the SSA with the value range.  There are two cases to
             consider.  First (the most common) is we are processing
             STMT in a context where its resulting range globally holds
             and thus it can be reflected into the global ranges and need
             not be unwound as we leave scope.

             The second case occurs if we are processing a statement in
             a context where the resulting range must not be reflected
             into the global tables and must be unwound as we leave
             the current context.  This happens in jump threading for
             example.  */
          if (!temporary)
            {
              /* Case one.  We can just update the underlying range
                 information as well as the global information.  */
              vr_values->update_value_range (output, &vr);
              if (m_update_global_ranges)
                set_ssa_range_info (output, &vr);
            }
          else
            {
              /* We're going to need to unwind this range.  We cannot
                 use VR as that's a stack object.  We have to allocate
                 a new range and push the old range onto the stack.  We
                 also have to be very careful about sharing the underlying
                 bitmaps.  Ugh.  */
              value_range *new_vr = vr_values->allocate_value_range ();
              new_vr->set (vr.kind (), vr.min (), vr.max ());
              vr.equiv_clear ();
              push_value_range (output, new_vr);
            }
        }
      else
        vr_values->set_defs_to_varying (stmt);
    }
  else
    vr_values->set_defs_to_varying (stmt);

  /* See if we can derive a range for any of STMT's operands.  */
  tree op;
  ssa_op_iter i;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    {
      tree value;
      enum tree_code comp_code;

      /* If OP is used in such a way that we can infer a value
         range for it, and we don't find a previous assertion for
         it, create a new assertion location node for OP.  */
      if (infer_value_range (stmt, op, &comp_code, &value))
        {
          /* If we are able to infer a nonzero value range for OP,
             then walk backwards through the use-def chain to see if OP
             was set via a typecast.
             If so, then we can also infer a nonzero value range
             for the operand of the NOP_EXPR.  */
          if (comp_code == NE_EXPR && integer_zerop (value))
            {
              tree t = op;
              gimple *def_stmt = SSA_NAME_DEF_STMT (t);
              while (is_gimple_assign (def_stmt)
                     && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
                     && TREE_CODE
                          (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
                     && POINTER_TYPE_P
                          (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
                {
                  t = gimple_assign_rhs1 (def_stmt);
                  def_stmt = SSA_NAME_DEF_STMT (t);

                  /* Add VR when (T COMP_CODE value) condition is
                     true.  */
                  value_range *op_range
                    = try_find_new_range (t, t, comp_code, value);
                  if (op_range)
                    push_value_range (t, op_range);
                }
            }
          /* Add VR when (OP COMP_CODE value) condition is true.  */
          value_range *op_range = try_find_new_range (op, op,
                                                      comp_code, value);
          if (op_range)
            push_value_range (op, op_range);
        }
    }
}

/* Unwind recorded ranges to their most recent state.  */

void
evrp_range_analyzer::pop_to_marker (void)
{
  gcc_checking_assert (!stack.is_empty ());
  while (stack.last ().first != NULL_TREE)
    pop_value_range (stack.last ().first);
  stack.pop ();
}

/* Restore/pop VRs valid only for BB when we leave BB.  */

void
evrp_range_analyzer::leave (basic_block bb ATTRIBUTE_UNUSED)
{
  if (!optimize)
    return;
  pop_to_marker ();
}


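/* The unwind stack pairs an SSA name with the value_range it had
   before the most recent push_value_range for it; a (NULL_TREE, NULL)
   entry pushed by push_marker () separates the entries belonging to
   different scopes.  With illustrative names, after

     push_marker ();
     push_value_range (x_1, vr1);
     push_value_range (y_2, vr2);

   a call to pop_to_marker () restores the saved ranges of y_2 and then
   x_1 and removes the marker.  */
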
/* Push the current Value Range of VAR onto the stack and set VAR's
   range to the new VR.  */

void
evrp_range_analyzer::push_value_range (tree var, value_range *vr)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "pushing new range for ");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, ": ");
      dump_value_range (dump_file, vr);
      fprintf (dump_file, "\n");
    }
  stack.safe_push (std::make_pair (var, get_value_range (var)));
  vr_values->set_vr_value (var, vr);
}

/* Pop the saved Value Range for VAR off the stack and restore it as
   VAR's current range.  */

const value_range *
evrp_range_analyzer::pop_value_range (tree var)
{
  const value_range *vr = stack.last ().second;
  gcc_checking_assert (var == stack.last ().first);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "popping range for ");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, ", restoring ");
      dump_value_range (dump_file, vr);
      fprintf (dump_file, "\n");
    }
  /* We saved off a lattice entry, now give it back - it can now
     be modified again, thus the const casting.  */
  vr_values->set_vr_value (var, const_cast <value_range *> (vr));
  stack.pop ();
  return vr;
}