// --------------------------------------------------------------------------
ranger_cache::ranger_cache (int not_executable_flag, bool use_imm_uses)
- : m_gori (not_executable_flag, param_vrp_switch_limit)
{
m_workback.create (0);
m_workback.safe_grow_cleared (last_basic_block_for_fn (cfun));
// If DOM info is available, spawn an oracle as well.
create_relation_oracle ();
create_infer_oracle (use_imm_uses);
+ create_gori (not_executable_flag, param_vrp_switch_limit);
unsigned x, lim = last_basic_block_for_fn (cfun);
// Calculate outgoing range info upfront. This will fully populate the
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, x);
if (bb)
- m_gori.map ()->exports (bb);
+ gori ().map ()->exports (bb);
}
m_update = new update_list ();
}
void
ranger_cache::dump_bb (FILE *f, basic_block bb)
{
- m_gori.map ()->dump (f, bb, false);
+ gori ().map ()->dump (f, bb, false);
m_on_entry.dump (f, bb);
m_relation->dump (f, bb);
}
current_p = false;
if (had_global)
current_p = r.singleton_p ()
- || m_temporal->current_p (name, m_gori.map ()->depend1 (name),
- m_gori.map ()->depend2 (name));
+ || m_temporal->current_p (name, gori ().map ()->depend1 (name),
+ gori ().map ()->depend2 (name));
else
{
// If no global value has been set and value is VARYING, fold the stmt
if (!changed)
{
// If there are dependencies, make sure this is not out of date.
- if (!m_temporal->current_p (name, m_gori.map ()->depend1 (name),
- m_gori.map ()->depend2 (name)))
+ if (!m_temporal->current_p (name, gori ().map ()->depend1 (name),
+ gori ().map ()->depend2 (name)))
m_temporal->set_timestamp (name);
return;
}
if (r.singleton_p ()
|| (POINTER_TYPE_P (TREE_TYPE (name)) && r.nonzero_p ()))
- m_gori.map ()->set_range_invariant (name);
+ gori ().map ()->set_range_invariant (name);
m_temporal->set_timestamp (name);
}
if ((e->flags & (EDGE_EH | EDGE_ABNORMAL)) == 0)
infer_oracle ().maybe_adjust_range (r, name, e->src);
Value_Range er (TREE_TYPE (name));
- if (m_gori.edge_range_p (er, e, name, *this))
+ if (gori ().edge_range_p (er, e, name, *this))
r.intersect (er);
return true;
}
// If there are no range calculations anywhere in the IL, global range
// applies everywhere, so don't bother caching it.
- if (!m_gori.has_edge_range_p (name))
+ if (!gori ().has_edge_range_p (name))
return false;
if (calc)
continue;
// Check if the equiv has any ranges calculated.
- if (!m_gori.has_edge_range_p (equiv_name))
+ if (!gori ().has_edge_range_p (equiv_name))
continue;
// Check if the equiv definition dominates this block
r.dump (dump_file);
fprintf (dump_file, ", ");
}
- if (!r.undefined_p () || m_gori.has_edge_range_p (name, e))
+ if (!r.undefined_p () || gori ().has_edge_range_p (name, e))
{
m_update->add (node);
if (DEBUG_RANGE_CACHE)
infer_oracle ().maybe_adjust_range (infer, name, bb);
// This block has an outgoing range.
- if (m_gori.has_edge_range_p (name, bb))
+ if (gori ().has_edge_range_p (name, bb))
m_workback.quick_push (prev_bb);
else
{
// If the first pred does not generate a range, then we will be
// using the dominator range anyway, so that's all the check needed.
if (EDGE_COUNT (prev_bb->preds) > 1
- && m_gori.has_edge_range_p (name, EDGE_PRED (prev_bb, 0)->src))
+ && gori ().has_edge_range_p (name, EDGE_PRED (prev_bb, 0)->src))
{
edge e;
edge_iterator ei;
edge e = single_pred_edge (prev_bb);
bb = e->src;
- if (m_gori.edge_range_p (er, e, name, *this))
+ if (gori ().edge_range_p (er, e, name, *this))
{
r.intersect (er);
// If this is a normal edge, apply any inferred ranges.
{
m_on_entry.set_bb_range (name, bb, r);
// If this range was invariant before, remove invariant.
- if (!m_gori.has_edge_range_p (name))
- m_gori.map ()->set_range_invariant (name, false);
+ if (!gori ().has_edge_range_p (name))
+ gori ().map ()->set_range_invariant (name, false);
}
}
void register_inferred_value (const vrange &r, tree name, basic_block bb);
void apply_inferred_ranges (gimple *s);
- gori_compute m_gori;
void dump_bb (FILE *f, basic_block bb);
virtual void dump (FILE *f) override;
m_query = q;
else
m_query = get_range_query (cfun);
- m_gori = NULL;
+ m_depend_p = false;
}
// Invoke range_of_expr on EXPR.
-// Instantiate a stmt based fur_source with a GORI object.
-fur_depend::fur_depend (gimple *s, gori_compute *gori, range_query *q)
+// Instantiate a stmt based fur_source which uses the GORI object
+// provided by range query Q to register dependencies and relations.
+fur_depend::fur_depend (gimple *s, range_query *q)
: fur_stmt (s, q)
{
- gcc_checking_assert (gori);
- m_gori = gori;
+ m_depend_p = true;
}
// Register a relation on a stmt if there is an oracle.
public:
fur_source (range_query *q = NULL);
inline range_query *query () { return m_query; }
- inline class gori_compute *gori () { return m_gori; };
+ inline class gimple_outgoing_range *gori ()
+ { return m_depend_p ? &(m_query->gori ()) : NULL; }
virtual bool get_operand (vrange &r, tree expr);
virtual bool get_phi_operand (vrange &r, tree expr, edge e);
virtual relation_kind query_relation (tree op1, tree op2);
void register_outgoing_edges (gcond *, irange &lhs_range, edge e0, edge e1);
protected:
range_query *m_query;
- gori_compute *m_gori;
+ bool m_depend_p;
};
// fur_stmt is the specification for drawing an operand from range_query Q
class fur_depend : public fur_stmt
{
public:
- fur_depend (gimple *s, gori_compute *gori, range_query *q = NULL);
+ fur_depend (gimple *s, range_query *q = NULL);
virtual void register_relation (gimple *stmt, relation_kind k, tree op1,
tree op2) override;
virtual void register_relation (edge e, relation_kind k, tree op1,
m_ranger (ranger),
m_resolve (resolve)
{
+ share_query (ranger);
+ // Override the relation oracle with a local path relation oracle.
m_relation = new path_oracle (&(m_ranger.relation ()));
reset_path (path, dependencies);
m_ranger (ranger),
m_resolve (resolve)
{
+ share_query (ranger);
+ // Override the relation oracle with a local path relation oracle.
m_relation = new path_oracle (&(m_ranger.relation ()));
}
}
if (bb && POINTER_TYPE_P (TREE_TYPE (name)))
- m_ranger.infer_oracle ().maybe_adjust_range (r, name, bb);
+ infer_oracle ().maybe_adjust_range (r, name, bb);
if (DEBUG_SOLVER && (bb || !r.varying_p ()))
{
p->reset_path ();
}
- gori_compute &g = m_ranger.gori ();
- bitmap exports = g.map()->exports (bb);
+ bitmap exports = gori ().map ()->exports (bb);
EXECUTE_IF_AND_IN_BITMAP (m_exit_dependencies, exports, 0, i, bi)
{
tree name = ssa_name (i);
Value_Range r (TREE_TYPE (name));
- if (g.edge_range_p (r, e, name, *this))
+ if (gori ().edge_range_p (r, e, name, *this))
{
Value_Range cached_range (TREE_TYPE (name));
if (get_cache (cached_range, name))
else
r.set_varying (TREE_TYPE (name));
- if (m_ranger.infer_oracle ().maybe_adjust_range (r, name, bb))
+ if (infer_oracle ().maybe_adjust_range (r, name, bb))
m_cache.set_range (name, r);
}
}
{
// Start with the imports from the exit block...
basic_block exit = m_path[0];
- gori_compute &gori = m_ranger.gori ();
- bitmap_copy (dependencies, gori.map()->imports (exit));
+ bitmap_copy (dependencies, gori ().map()->imports (exit));
auto_vec<tree> worklist (bitmap_count_bits (dependencies));
bitmap_iterator bi;
{
basic_block bb = m_path[i];
tree name;
- FOR_EACH_GORI_EXPORT_NAME (*(gori.map ()), bb, name)
+ FOR_EACH_GORI_EXPORT_NAME (*(gori ().map ()), bb, name)
if (TREE_CODE (TREE_TYPE (name)) == BOOLEAN_TYPE)
bitmap_set_bit (dependencies, SSA_NAME_VERSION (name));
}
class jt_fur_source : public fur_depend
{
public:
- jt_fur_source (gimple *s, path_range_query *, gori_compute *,
- const vec<basic_block> &);
+ jt_fur_source (gimple *s, path_range_query *, const vec<basic_block> &);
relation_kind query_relation (tree op1, tree op2) override;
void register_relation (gimple *, relation_kind, tree op1, tree op2) override;
void register_relation (edge, relation_kind, tree op1, tree op2) override;
jt_fur_source::jt_fur_source (gimple *s,
path_range_query *query,
- gori_compute *gori,
const vec<basic_block> &path)
- : fur_depend (s, gori, query)
+ : fur_depend (s, query)
{
gcc_checking_assert (!path.is_empty ());
if (m_resolve)
{
fold_using_range f;
- jt_fur_source src (stmt, this, &m_ranger.gori (), m_path);
+ jt_fur_source src (stmt, this, m_path);
if (!f.fold_stmt (r, stmt, src))
r.set_varying (type);
}
else
gcc_unreachable ();
- jt_fur_source src (NULL, this, &m_ranger.gori (), m_path);
+ jt_fur_source src (NULL, this, m_path);
src.register_outgoing_edges (cond, r, e0, e1);
}
}
gimple_ranger::fold_range_internal (vrange &r, gimple *s, tree name)
{
fold_using_range f;
- fur_depend src (s, &(gori ()), this);
+ fur_depend src (s, this);
return f.fold_stmt (r, s, src, name);
}
// Update any exports in the cache if this is a gimple cond statement.
tree exp;
basic_block bb = gimple_bb (s);
- FOR_EACH_GORI_EXPORT_NAME (*(m_cache.m_gori.map ()), bb, exp)
+ FOR_EACH_GORI_EXPORT_NAME (*(gori ().map ()), bb, exp)
m_cache.propagate_updated_value (exp, bb);
}
}
assume_query::assume_query ()
{
+ create_gori (0, param_vrp_switch_limit);
basic_block exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
if (single_pred_p (exit_bb))
{
}
}
+// Destructor.  Free the gori object allocated by the constructor.
+assume_query::~assume_query ()
+{
+ destroy_gori ();
+}
+
// Evaluate operand OP on statement S, using the provided LHS range.
// If successful, set the range in the global table, then visit OP's def stmt.
assume_query::calculate_op (tree op, gimple *s, vrange &lhs, fur_source &src)
{
Value_Range op_range (TREE_TYPE (op));
- if (m_gori.compute_operand_range (op_range, s, lhs, op, src)
+ if (gori ().compute_operand_range (op_range, s, lhs, op, src)
&& !op_range.varying_p ())
{
// Set the global range, merging if there is already a range.
virtual bool range_on_entry (vrange &r, basic_block bb, tree name) override;
virtual bool range_on_exit (vrange &r, basic_block bb, tree name) override;
void export_global_ranges ();
- inline gori_compute &gori () { return m_cache.m_gori; }
virtual void dump (FILE *f) override;
void debug ();
void dump_bb (FILE *f, basic_block bb);
{
public:
assume_query ();
+ ~assume_query ();
bool assume_range_p (vrange &r, tree name);
virtual bool range_of_expr (vrange &r, tree expr, gimple * = NULL);
void dump (FILE *f);
void check_taken_edge (edge e, fur_source &src);
ssa_lazy_cache global;
- gori_compute m_gori;
};
// DOM based ranger for fast VRP.
return;
tree name;
- gori_compute &gori = m_ranger->gori ();
- FOR_EACH_GORI_EXPORT_NAME (*(gori.map()), pred_e->src, name)
+ FOR_EACH_GORI_EXPORT_NAME (*(m_ranger->gori ().map()), pred_e->src, name)
if (all_uses_feed_or_dominated_by_stmt (name, stmt)
// The condition must post-dominate the definition point.
&& (SSA_NAME_IS_DEFAULT_DEF (name)
const vec<basic_block> &path,
gimple *stmt)
{
- gori_compute &gori = m_ranger->gori ();
-
// Start with the imports to the final conditional.
- bitmap_copy (dependencies, gori.map ()->imports (path[0]));
+ bitmap_copy (dependencies, m_ranger->gori ().map ()->imports (path[0]));
// Add any other interesting operands we may have missed.
if (gimple_bb (stmt) != path[0])
// can be used anywhere.
relation_oracle default_relation_oracle;
infer_range_oracle default_infer_oracle;
+gimple_outgoing_range default_gori;
+
+// Allocate a gori_compute engine for this query, replacing the default
+// (do-nothing) gori object.  NOT_EXECUTABLE_FLAG and SW_MAX_EDGES are
+// passed along to the new engine.  It is an error to call this when a
+// non-default gori object is already active.
+
+void
+range_query::create_gori (int not_executable_flag, int sw_max_edges)
+{
+ gcc_checking_assert (m_gori == &default_gori);
+ m_gori = new gori_compute (not_executable_flag, sw_max_edges);
+}
+
+// Free any gori engine this query allocated, and reinstate the default
+// gori object.  This is safe to call more than once.
+
+void
+range_query::destroy_gori ()
+{
+ if (m_gori && m_gori != &default_gori)
+ delete m_gori;
+ m_gori = &default_gori;
+}
void
range_query::create_infer_oracle (bool do_search)
{
m_relation = q.m_relation;
m_infer = q.m_infer;
+ m_gori = q.m_gori;
m_shared_copy_p = true;
}
{
m_relation = &default_relation_oracle;
m_infer = &default_infer_oracle;
+ m_gori = &default_gori;
m_shared_copy_p = false;
}
// Do not destroy anything if this is a shared copy.
if (m_shared_copy_p)
return;
+ destroy_gori ();
destroy_infer_oracle ();
destroy_relation_oracle ();
}
void create_infer_oracle (bool do_search = TRUE);
void destroy_infer_oracle ();
+ inline class gimple_outgoing_range &gori () const { return *m_gori; }
+ void create_gori (int not_executable_flag = 0, int sw_max_edges = INT_MAX);
+ void destroy_gori ();
+
virtual void dump (FILE *);
protected:
bool get_arith_expr_range (vrange &r, tree expr, gimple *stmt);
relation_oracle *m_relation;
infer_range_oracle *m_infer;
+ gimple_outgoing_range *m_gori;
// When multiple related range queries wish to share oracles.
// This is an internal interface
void share_query (range_query &q);