+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * hash-set.h: New file.
+ * cfgexpand.c, cfgloop.c, cgraph.c, cgraphbuild.c, cgraphunit.c,
+ cprop.c, cse.c, gimple-walk.c, gimple-walk.h, gimplify.c, godump.c,
+ ipa-devirt.c, ipa-pure-const.c, ipa-visibility.c, ipa.c, lto-cgraph.c,
+ lto-streamer-out.c, stmt.c, tree-cfg.c, tree-core.h, tree-eh.c,
+ tree-inline.c, tree-inline.h, tree-nested.c, tree-pretty-print.c,
+ tree-ssa-loop-niter.c, tree-ssa-phiopt.c, tree-ssa-threadedge.c,
+ tree-ssa-uninit.c, tree.c, tree.h, value-prof.c, varasm.c,
+ varpool.c: Use hash_set instead of pointer_set.
+
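The conversion in the files listed above is mechanical: each pointer_set_create / pointer_set_insert / pointer_set_contains / pointer_set_destroy sequence becomes a typed hash_set with the same return conventions. A minimal sketch of the new idiom, assuming the usual GCC includes (visit_once is an illustrative helper, not a function touched by this patch):

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "hash-set.h"

    /* Illustrative only: return true the first time NODE is seen.  The
       pointer_set spelling was "!pointer_set_insert (visited, node)".  */

    static bool
    visit_once (tree node, hash_set<tree> *visited)
    {
      /* hash_set::add returns true when NODE was already in the set,
         preserving the pointer_set_insert return convention.  */
      return !visited->add (node);
    }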
2014-08-01 Alan Lawrence <alan.lawrence@arm.com>
* config/aarch64/aarch64-simd-builtins.def (dup_lane, get_lane): Delete.
+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * gcc-interface/trans.c: Use hash_set instead of pointer_set.
+
2014-08-01 Ed Schonberg <schonberg@adacore.com>
* restrict.adb (Update_Restrictions): For restrictions with a
#include "output.h"
#include "libfuncs.h" /* For set_stack_check_libfunc. */
#include "tree-iterator.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "bitmap.h"
bitmap nrv;
tree result;
Node_Id gnat_ret;
- struct pointer_set_t *visited;
+ hash_set<tree> *visited;
};
/* Return true if T is a Named Return Value. */
/* Avoid walking into the same tree more than once. Unfortunately, we
can't just use walk_tree_without_duplicates because it would only
call us for the first occurrence of NRVs in the function body. */
- if (pointer_set_insert (dp->visited, *tp))
+ if (dp->visited->add (*tp))
*walk_subtrees = 0;
return NULL_TREE;
/* Avoid walking into the same tree more than once. Unfortunately, we
can't just use walk_tree_without_duplicates because it would only
call us for the first occurrence of NRVs in the function body. */
- if (pointer_set_insert (dp->visited, *tp))
+ if (dp->visited->add (*tp))
*walk_subtrees = 0;
return NULL_TREE;
data.nrv = nrv;
data.result = DECL_RESULT (fndecl);
data.gnat_ret = gnat_ret;
- data.visited = pointer_set_create ();
+ data.visited = new hash_set<tree>;
if (TYPE_RETURN_UNCONSTRAINED_P (TREE_TYPE (fndecl)))
func = finalize_nrv_unc_r;
else
func = finalize_nrv_r;
walk_tree (&DECL_SAVED_TREE (fndecl), func, &data, NULL);
- pointer_set_destroy (data.visited);
+ delete data.visited;
}
/* Return true if RET_VAL can be used as a Named Return Value for the
+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * c-gimplify.c: Use hash_set instead of pointer_set.
+
2014-08-01 Igor Zamyatin <igor.zamyatin@intel.com>
PR middle-end/61455
static tree
ubsan_walk_array_refs_r (tree *tp, int *walk_subtrees, void *data)
{
- struct pointer_set_t *pset = (struct pointer_set_t *) data;
+ hash_set<tree> *pset = (hash_set<tree> *) data;
/* Since walk_tree doesn't call the callback function on the decls
in BIND_EXPR_VARS, we have to walk them manually. */
if (flag_sanitize & SANITIZE_BOUNDS)
{
- struct pointer_set_t *pset = pointer_set_create ();
- walk_tree (&DECL_SAVED_TREE (fndecl), ubsan_walk_array_refs_r, pset,
- pset);
- pointer_set_destroy (pset);
+ hash_set<tree> pset;
+ walk_tree (&DECL_SAVED_TREE (fndecl), ubsan_walk_array_refs_r, &pset,
+ &pset);
}
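Two allocation styles appear in these hunks: when the set lives only for the duration of one function, as in the c-gimplify.c change above, pointer_set_create / pointer_set_destroy becomes a stack-allocated hash_set whose destructor does the cleanup; sets that outlive the call, such as the static "diagnosed" and "explained" caches converted further down, stay behind a pointer and are managed with new / delete. A rough sketch of both forms, with illustrative names only:

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "hash-set.h"

    /* Function-local set: freed automatically on return.  */
    static bool
    seen_locally (tree t)
    {
      hash_set<tree> pset;    /* was pointer_set_create ()  */
      return pset.add (t);    /* no pointer_set_destroy () needed  */
    }

    /* Long-lived set: still heap-allocated, freed with delete (or, for the
       static caches, never freed).  */
    static hash_set<tree> *cache;

    static bool
    seen_globally (tree t)
    {
      if (!cache)
        cache = new hash_set<tree>;
      return cache->add (t);
    }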
/* Dump the C-specific tree IR. */
+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * c-decl.c: Use hash_set instead of pointer_set.
+
2014-08-01 Igor Zamyatin <igor.zamyatin@intel.com>
PR middle-end/61455
#include "cgraph.h"
#include "hash-table.h"
#include "langhooks-def.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "plugin.h"
#include "c-family/c-ada-spec.h"
#include "cilk.h"
if (!struct_parse_info->typedefs_seen.is_empty ()
&& fieldlist != NULL_TREE)
{
- /* Use a pointer_set using the name of the typedef. We can use
- a pointer_set because identifiers are interned. */
- struct pointer_set_t *tset = pointer_set_create ();
+ /* Use a hash_set<tree> using the name of the typedef. We can use
+ a hash_set<tree> because identifiers are interned. */
+ hash_set<tree> tset;
FOR_EACH_VEC_ELT (struct_parse_info->typedefs_seen, ix, x)
- pointer_set_insert (tset, DECL_NAME (x));
+ tset.add (DECL_NAME (x));
for (x = fieldlist; x != NULL_TREE; x = DECL_CHAIN (x))
{
if (DECL_NAME (x) != NULL_TREE
- && pointer_set_contains (tset, DECL_NAME (x)))
+ && tset.contains (DECL_NAME (x)))
{
warning_at (DECL_SOURCE_LOCATION (x), OPT_Wc___compat,
("using %qD as both field and typedef name is "
the typedef name is used. */
}
}
-
- pointer_set_destroy (tset);
}
/* For each field which has a binding and which was not defined in
struct c_binding *b;
tree parm, decl, last;
tree parmids = arg_info->parms;
- struct pointer_set_t *seen_args = pointer_set_create ();
+ hash_set<tree> seen_args;
if (!in_system_header_at (input_location))
warning_at (DECL_SOURCE_LOCATION (fndecl),
"%qD declared as a non-parameter", decl);
/* If the declaration is already marked, we have a duplicate
name. Complain and ignore the duplicate. */
- else if (pointer_set_contains (seen_args, decl))
+ else if (seen_args.contains (decl))
{
error_at (DECL_SOURCE_LOCATION (decl),
"multiple parameters named %qD", decl);
}
TREE_PURPOSE (parm) = decl;
- pointer_set_insert (seen_args, decl);
+ seen_args.add (decl);
}
/* Now examine the parms chain for incomplete declarations
TREE_TYPE (parm) = error_mark_node;
}
- if (!pointer_set_contains (seen_args, parm))
+ if (!seen_args.contains (parm))
{
error_at (DECL_SOURCE_LOCATION (parm),
"declaration for parameter %qD but no such parameter",
DECL_CHAIN (last) = 0;
}
- pointer_set_destroy (seen_args);
-
/* If there was a previous prototype,
set the DECL_ARG_TYPE of each argument according to
the type previously specified, and report any mismatches. */
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
struct pointer_map_t *decls_to_partitions,
- struct pointer_set_t *visited, bitmap temp)
+ hash_set<bitmap> *visited, bitmap temp)
{
bitmap_iterator bi;
unsigned i;
|| pt->vars == NULL
/* The pointed-to vars bitmap is shared, it is enough to
visit it once. */
- || pointer_set_insert (visited, pt->vars))
+ || visited->add (pt->vars))
return;
bitmap_clear (temp);
if (decls_to_partitions)
{
unsigned i;
- struct pointer_set_t *visited = pointer_set_create ();
+ hash_set<bitmap> visited;
bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
for (i = 1; i < num_ssa_names; i++)
&& POINTER_TYPE_P (TREE_TYPE (name))
&& ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
- visited, temp);
+ &visited, temp);
}
add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
- decls_to_partitions, visited, temp);
+ decls_to_partitions, &visited, temp);
- pointer_set_destroy (visited);
pointer_map_destroy (decls_to_partitions);
BITMAP_FREE (temp);
}
#include "diagnostic-core.h"
#include "flags.h"
#include "tree.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
/* Callback for make_forwarder_block. Returns true if the edge E is marked
in the set MFB_REIS_SET. */
-static struct pointer_set_t *mfb_reis_set;
+static hash_set<edge> *mfb_reis_set;
static bool
mfb_redirect_edges_in_set (edge e)
{
- return pointer_set_contains (mfb_reis_set, e);
+ return mfb_reis_set->contains (e);
}
/* Creates a subloop of LOOP with latch edge LATCH. */
edge e, new_entry;
struct loop *new_loop;
- mfb_reis_set = pointer_set_create ();
+ mfb_reis_set = new hash_set<edge>;
FOR_EACH_EDGE (e, ei, loop->header->preds)
{
if (e != latch)
- pointer_set_insert (mfb_reis_set, e);
+ mfb_reis_set->add (e);
}
new_entry = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
NULL);
- pointer_set_destroy (mfb_reis_set);
+ delete mfb_reis_set;
loop->header = new_entry->src;
if (dump_file)
fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num);
- mfb_reis_set = pointer_set_create ();
+ mfb_reis_set = new hash_set<edge>;
FOR_EACH_VEC_ELT (latches, i, e)
- pointer_set_insert (mfb_reis_set, e);
+ mfb_reis_set->add (e);
latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
NULL);
- pointer_set_destroy (mfb_reis_set);
+ delete mfb_reis_set;
loop->header = latch->dest;
loop->latch = latch->src;
#include "tree-inline.h"
#include "langhooks.h"
#include "hashtab.h"
+#include "hash-set.h"
#include "toplev.h"
#include "flags.h"
#include "debug.h"
{
if (this_cfun->cfg)
{
- pointer_set_t *stmts = pointer_set_create ();
+ hash_set<gimple> stmts;
int i;
struct ipa_ref *ref = NULL;
{
for (gsi = gsi_start_phis (this_block);
!gsi_end_p (gsi); gsi_next (&gsi))
- pointer_set_insert (stmts, gsi_stmt (gsi));
+ stmts.add (gsi_stmt (gsi));
for (gsi = gsi_start_bb (this_block);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- pointer_set_insert (stmts, stmt);
+ stmts.add (stmt);
if (is_gimple_call (stmt))
{
struct cgraph_edge *e = get_edge (stmt);
}
}
for (i = 0; iterate_reference (i, ref); i++)
- if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
+ if (ref->stmt && !stmts.contains (ref->stmt))
{
error ("reference to dead statement");
cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
error_found = true;
}
- pointer_set_destroy (stmts);
}
else
/* No CFG available?! */
{
basic_block bb;
struct cgraph_node *node = cgraph_node::get (current_function_decl);
- struct pointer_set_t *visited_nodes = pointer_set_create ();
gimple_stmt_iterator gsi;
tree decl;
unsigned ix;
varpool_node::finalize_decl (decl);
record_eh_tables (node, fun);
- pointer_set_destroy (visited_nodes);
return 0;
}
void
record_references_in_initializer (tree decl, bool only_vars)
{
- struct pointer_set_t *visited_nodes = pointer_set_create ();
varpool_node *node = varpool_node::get_create (decl);
+ hash_set<tree> visited_nodes;
struct record_reference_ctx ctx = {false, NULL};
ctx.varpool_node = node;
ctx.only_vars = only_vars;
walk_tree (&DECL_INITIAL (decl), record_reference,
- &ctx, visited_nodes);
- pointer_set_destroy (visited_nodes);
+ &ctx, &visited_nodes);
}
/* Rebuild cgraph edges for current function node. This needs to be run after
avoid duplicate work. */
static void
-walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
+walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
struct cgraph_edge *edge)
{
unsigned int i;
= possible_polymorphic_call_targets
(edge, &final, &cache_token);
- if (!pointer_set_insert (reachable_call_targets,
- cache_token))
+ if (!reachable_call_targets->add (cache_token))
{
if (cgraph_dump_file)
dump_possible_polymorphic_call_targets
struct cgraph_node *first_handled = first_analyzed;
static varpool_node *first_analyzed_var;
varpool_node *first_handled_var = first_analyzed_var;
- struct pointer_set_t *reachable_call_targets = pointer_set_create ();
+ hash_set<void *> reachable_call_targets;
symtab_node *node;
symtab_node *next;
{
next = edge->next_callee;
if (edge->indirect_info->polymorphic)
- walk_polymorphic_call_targets (reachable_call_targets,
+ walk_polymorphic_call_targets (&reachable_call_targets,
edge);
}
}
symtab_node::dump_table (cgraph_dump_file);
}
bitmap_obstack_release (NULL);
- pointer_set_destroy (reachable_call_targets);
ggc_collect ();
/* Initialize assembler name hash, in particular we want to trigger C++
mangling and same body alias creation before we free DECL_ARGUMENTS
+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * class.c, cp-gimplify.c, cp-tree.h, decl.c, decl2.c, error.c,
+ method.c, name-lookup.c, pt.c, semantics.c, tree.c: Use hash_set
+ instead of pointer_set.
+
2014-08-01 Jason Merrill <jason@redhat.com>
PR c++/60417
void
explain_non_literal_class (tree t)
{
- static struct pointer_set_t *diagnosed;
+ static hash_set<tree> *diagnosed;
if (!CLASS_TYPE_P (t))
return;
t = TYPE_MAIN_VARIANT (t);
if (diagnosed == NULL)
- diagnosed = pointer_set_create ();
- if (pointer_set_insert (diagnosed, t) != 0)
+ diagnosed = new hash_set<tree>;
+ if (diagnosed->add (t))
/* Already explained. */
return;
struct cp_genericize_data
{
- struct pointer_set_t *p_set;
+ hash_set<tree> *p_set;
vec<tree> bind_expr_stack;
struct cp_genericize_omp_taskreg *omp_ctx;
};
{
tree stmt = *stmt_p;
struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
- struct pointer_set_t *p_set = wtd->p_set;
+ hash_set<tree> *p_set = wtd->p_set;
/* If in an OpenMP context, note var uses. */
if (__builtin_expect (wtd->omp_ctx != NULL, 0)
}
/* Other than invisiref parms, don't walk the same tree twice. */
- if (pointer_set_contains (p_set, stmt))
+ if (p_set->contains (stmt))
{
*walk_subtrees = 0;
return NULL_TREE;
}
}
- pointer_set_insert (p_set, *stmt_p);
+ p_set->add (*stmt_p);
return NULL;
}
{
struct cp_genericize_data wtd;
- wtd.p_set = pointer_set_create ();
+ wtd.p_set = new hash_set<tree>;
wtd.bind_expr_stack.create (0);
wtd.omp_ctx = NULL;
cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
- pointer_set_destroy (wtd.p_set);
+ delete wtd.p_set;
wtd.bind_expr_stack.release ();
}
extern linkage_kind decl_linkage (tree);
extern duration_kind decl_storage_duration (tree);
extern tree cp_walk_subtrees (tree*, int*, walk_tree_fn,
- void*, struct pointer_set_t*);
+ void*, hash_set<tree> *);
#define cp_walk_tree(tp,func,data,pset) \
walk_tree_1 (tp, func, data, pset, cp_walk_subtrees)
#define cp_walk_tree_without_duplicates(tp,func,data) \
static tree
stabilize_save_expr_r (tree *expr_p, int *walk_subtrees, void *data)
{
- struct pointer_set_t *pset = (struct pointer_set_t *)data;
+ hash_set<tree> *pset = (hash_set<tree> *)data;
tree expr = *expr_p;
if (TREE_CODE (expr) == SAVE_EXPR)
{
static void
stabilize_vla_size (tree size)
{
- struct pointer_set_t *pset = pointer_set_create ();
+ hash_set<tree> pset;
/* Break out any function calls into temporary variables. */
- cp_walk_tree (&size, stabilize_save_expr_r, pset, pset);
- pointer_set_destroy (pset);
+ cp_walk_tree (&size, stabilize_save_expr_r, &pset, &pset);
}
/* Helper function for compute_array_index_type. Look for SIZEOF_EXPR
supported, collect and return all the functions for which we should
emit a hidden alias. */
-static struct pointer_set_t *
+static hash_set<tree> *
collect_candidates_for_java_method_aliases (void)
{
struct cgraph_node *node;
- struct pointer_set_t *candidates = NULL;
+ hash_set<tree> *candidates = NULL;
#ifndef HAVE_GAS_HIDDEN
return candidates;
&& TARGET_USE_LOCAL_THUNK_ALIAS_P (fndecl))
{
if (candidates == NULL)
- candidates = pointer_set_create ();
- pointer_set_insert (candidates, fndecl);
+ candidates = new hash_set<tree>;
+ candidates->add (fndecl);
}
}
by collect_candidates_for_java_method_aliases. */
static void
-build_java_method_aliases (struct pointer_set_t *candidates)
+build_java_method_aliases (hash_set<tree> *candidates)
{
struct cgraph_node *node;
tree fndecl = node->decl;
if (TREE_ASM_WRITTEN (fndecl)
- && pointer_set_contains (candidates, fndecl))
+ && candidates->contains (fndecl))
{
/* Mangle the name in a predictable way; we need to reference
this from a java compiled object file. */
unsigned ssdf_count = 0;
int retries = 0;
tree decl;
- struct pointer_set_t *candidates;
+ hash_set<tree> *candidates;
locus = input_location;
at_eof = 1;
if (candidates)
{
build_java_method_aliases (candidates);
- pointer_set_destroy (candidates);
+ delete candidates;
}
finish_repo ();
struct find_typenames_t
{
- struct pointer_set_t *p_set;
+ hash_set<tree> *p_set;
vec<tree, va_gc> *typenames;
};
return NULL_TREE;
}
- if (mv && (mv == *tp || !pointer_set_insert (d->p_set, mv)))
+ if (mv && (mv == *tp || !d->p_set->add (mv)))
vec_safe_push (d->typenames, mv);
/* Search into class template arguments, which cp_walk_subtrees
find_typenames (tree t)
{
struct find_typenames_t ft;
- ft.p_set = pointer_set_create ();
+ ft.p_set = new hash_set<tree>;
ft.typenames = NULL;
cp_walk_tree (&TREE_TYPE (DECL_TEMPLATE_RESULT (t)),
find_typenames_r, &ft, ft.p_set);
- pointer_set_destroy (ft.p_set);
+ delete ft.p_set;
return ft.typenames;
}
if (DECL_DEFAULTED_FN (decl))
{
/* Not marked GTY; it doesn't need to be GC'd or written to PCH. */
- static struct pointer_set_t *explained;
+ static hash_set<tree> *explained;
special_function_kind sfk;
location_t loc;
tree ctype;
if (!explained)
- explained = pointer_set_create ();
- if (pointer_set_insert (explained, decl))
+ explained = new hash_set<tree>;
+ if (explained->add (decl))
return true;
sfk = special_function_p (decl);
#include "debug.h"
#include "c-family/c-pragma.h"
#include "params.h"
-#include "pointer-set.h"
+#include "hash-set.h"
/* The bindings for a particular name in a particular scope. */
vec<tree, va_gc> *namespaces;
vec<tree, va_gc> *classes;
tree functions;
- struct pointer_set_t *fn_set;
+ hash_set<tree> *fn_set;
};
static bool arg_assoc (struct arg_lookup*, tree);
if (!is_overloaded_fn (fn))
/* All names except those of (possibly overloaded) functions and
function templates are ignored. */;
- else if (k->fn_set && pointer_set_insert (k->fn_set, fn))
+ else if (k->fn_set && k->fn_set->add (fn))
/* It's already in the list. */;
else if (!k->functions)
k->functions = fn;
/* We shouldn't be here if lookup found something other than
namespace-scope functions. */
gcc_assert (DECL_NAMESPACE_SCOPE_P (OVL_CURRENT (fns)));
- k.fn_set = pointer_set_create ();
+ k.fn_set = new hash_set<tree>;
for (ovl = fns; ovl; ovl = OVL_NEXT (ovl))
- pointer_set_insert (k.fn_set, OVL_CURRENT (ovl));
+ k.fn_set->add (OVL_CURRENT (ovl));
}
else
k.fn_set = NULL;
release_tree_vector (k.classes);
release_tree_vector (k.namespaces);
- if (k.fn_set)
- pointer_set_destroy (k.fn_set);
+ delete k.fn_set;
return fns;
}
static tree convert_template_argument (tree, tree, tree,
tsubst_flags_t, int, tree);
static int for_each_template_parm (tree, tree_fn_t, void*,
- struct pointer_set_t*, bool);
+ hash_set<tree> *, bool);
static tree expand_template_argument_pack (tree);
static tree build_template_parm_index (int, int, int, tree, tree);
static bool inline_needs_template_parms (tree, bool);
tree* parameter_packs;
/* Set of AST nodes that have been visited by the traversal. */
- struct pointer_set_t *visited;
+ hash_set<tree> *visited;
};
/* Identifies all of the argument packs that occur in a template
tree parameter_packs = NULL_TREE;
struct find_parameter_pack_data ppd;
ppd.parameter_packs = &parameter_packs;
- ppd.visited = pointer_set_create ();
+ ppd.visited = new hash_set<tree>;
cp_walk_tree (&t, &find_parameter_packs_r, &ppd, ppd.visited);
- pointer_set_destroy (ppd.visited);
+ delete ppd.visited;
return parameter_packs != NULL_TREE;
}
/* Determine which parameter packs will be used by the base
class expansion. */
- ppd.visited = pointer_set_create ();
+ ppd.visited = new hash_set<tree>;
ppd.parameter_packs = &parameter_packs;
cp_walk_tree (&TREE_PURPOSE (arg), &find_parameter_packs_r,
&ppd, ppd.visited);
if (parameter_packs == NULL_TREE)
{
error ("base initializer expansion %<%T%> contains no parameter packs", arg);
- pointer_set_destroy (ppd.visited);
+ delete ppd.visited;
return error_mark_node;
}
}
}
- pointer_set_destroy (ppd.visited);
+ delete ppd.visited;
/* Create the pack expansion type for the base type. */
purpose = cxx_make_type (TYPE_PACK_EXPANSION);
/* Determine which parameter packs will be expanded. */
ppd.parameter_packs = &parameter_packs;
- ppd.visited = pointer_set_create ();
+ ppd.visited = new hash_set<tree>;
cp_walk_tree (&arg, &find_parameter_packs_r, &ppd, ppd.visited);
- pointer_set_destroy (ppd.visited);
+ delete ppd.visited;
/* Make sure we found some parameter packs. */
if (parameter_packs == NULL_TREE)
t = TREE_TYPE (t);
ppd.parameter_packs = &parameter_packs;
- ppd.visited = pointer_set_create ();
+ ppd.visited = new hash_set<tree>;
cp_walk_tree (&t, &find_parameter_packs_r, &ppd, ppd.visited);
- pointer_set_destroy (ppd.visited);
+ delete ppd.visited;
if (parameter_packs)
{
tree parameter_packs = NULL_TREE;
struct find_parameter_pack_data ppd;
ppd.parameter_packs = &parameter_packs;
- ppd.visited = pointer_set_create ();
+ ppd.visited = new hash_set<tree>;
fixed_parameter_pack_p_1 (parm, &ppd);
- pointer_set_destroy (ppd.visited);
+ delete ppd.visited;
return parameter_packs;
}
/* True when we should also visit template parameters that occur in
non-deduced contexts. */
bool include_nondeduced_p;
- struct pointer_set_t *visited;
+ hash_set<tree> *visited;
};
/* Called from for_each_template_parm via walk_tree. */
static int
for_each_template_parm (tree t, tree_fn_t fn, void* data,
- struct pointer_set_t *visited,
+ hash_set<tree> *visited,
bool include_nondeduced_p)
{
struct pair_fn_data pfd;
if (visited)
pfd.visited = visited;
else
- pfd.visited = pointer_set_create ();
+ pfd.visited = new hash_set<tree>;
result = cp_walk_tree (&t,
for_each_template_parm_r,
&pfd,
/* Clean up. */
if (!visited)
{
- pointer_set_destroy (pfd.visited);
+ delete pfd.visited;
pfd.visited = 0;
}
void
explain_invalid_constexpr_fn (tree fun)
{
- static struct pointer_set_t *diagnosed;
+ static hash_set<tree> *diagnosed;
tree body;
location_t save_loc;
/* Only diagnose defaulted functions or instantiations. */
&& !is_instantiation_of_constexpr (fun))
return;
if (diagnosed == NULL)
- diagnosed = pointer_set_create ();
- if (pointer_set_insert (diagnosed, fun) != 0)
+ diagnosed = new hash_set<tree>;
+ if (diagnosed->add (fun))
/* Already explained. */
return;
tree
cp_walk_subtrees (tree *tp, int *walk_subtrees_p, walk_tree_fn func,
- void *data, struct pointer_set_t *pset)
+ void *data, hash_set<tree> *pset)
{
enum tree_code code = TREE_CODE (*tp);
tree result;
??? May need to make things more elaborate. Later, as necessary. */
static unsigned int
-hash_set (int regno, int hash_table_size)
+hash_mod (int regno, int hash_table_size)
{
return (unsigned) regno % hash_table_size;
}
struct expr *cur_expr, *last_expr = NULL;
struct occr *cur_occr;
- hash = hash_set (REGNO (dest), table->size);
+ hash = hash_mod (REGNO (dest), table->size);
for (cur_expr = table->table[hash]; cur_expr;
cur_expr = cur_expr->next_same_hash)
static struct expr *
lookup_set (unsigned int regno, struct hash_table_d *table)
{
- unsigned int hash = hash_set (regno, table->size);
+ unsigned int hash = hash_mod (regno, table->size);
struct expr *expr;
expr = table->table[hash];
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
-#include "pointer-set.h"
+#include "hash-set.h"
/* The basic idea of common subexpression elimination is to go
through the code, keeping a record of expressions that would
enum machine_mode *pmode1, enum machine_mode *pmode2)
{
rtx arg1, arg2;
- struct pointer_set_t *visited = NULL;
+ hash_set<rtx> *visited = NULL;
/* Set nonzero when we find something of interest. */
rtx x = NULL;
if (x)
{
if (!visited)
- visited = pointer_set_create ();
- pointer_set_insert (visited, x);
+ visited = new hash_set<rtx>;
+ visited->add (x);
x = 0;
}
continue;
/* If it's a comparison we've used before, skip it. */
- if (visited && pointer_set_contains (visited, p->exp))
+ if (visited && visited->contains (p->exp))
continue;
if (GET_CODE (p->exp) == COMPARE
*parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
if (visited)
- pointer_set_destroy (visited);
+ delete visited;
return code;
}
\f
+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * openmp.c, trans-decl.c: Use hash_set instead of pointer_set.
+
2014-07-26 Tobias Burnus <burnus@net-b.de>
PR fortran/61881
#include "arith.h"
#include "match.h"
#include "parse.h"
-#include "pointer-set.h"
+#include "hash-set.h"
/* Match an end of OpenMP directive. End of OpenMP directive is optional
whitespace, followed by '\n' or comment '!'. */
struct omp_context
{
gfc_code *code;
- struct pointer_set_t *sharing_clauses;
- struct pointer_set_t *private_iterators;
+ hash_set<gfc_symbol *> *sharing_clauses;
+ hash_set<gfc_symbol *> *private_iterators;
struct omp_context *previous;
} *omp_current_ctx;
static gfc_code *omp_current_do_code;
int list;
ctx.code = code;
- ctx.sharing_clauses = pointer_set_create ();
- ctx.private_iterators = pointer_set_create ();
+ ctx.sharing_clauses = new hash_set<gfc_symbol *>;
+ ctx.private_iterators = new hash_set<gfc_symbol *>;
ctx.previous = omp_current_ctx;
omp_current_ctx = &ctx;
case OMP_LIST_REDUCTION:
case OMP_LIST_LINEAR:
for (n = omp_clauses->lists[list]; n; n = n->next)
- pointer_set_insert (ctx.sharing_clauses, n->sym);
+ ctx.sharing_clauses->add (n->sym);
break;
default:
break;
}
omp_current_ctx = ctx.previous;
- pointer_set_destroy (ctx.sharing_clauses);
- pointer_set_destroy (ctx.private_iterators);
+ delete ctx.sharing_clauses;
+ delete ctx.private_iterators;
}
if (omp_current_ctx == NULL)
return;
- if (pointer_set_contains (omp_current_ctx->sharing_clauses, sym))
+ if (omp_current_ctx->sharing_clauses->contains (sym))
return;
- if (! pointer_set_insert (omp_current_ctx->private_iterators, sym))
+ if (! omp_current_ctx->private_iterators->add (sym))
{
gfc_omp_clauses *omp_clauses = omp_current_ctx->code->ext.omp_clauses;
gfc_omp_namelist *p;
#include "cgraph.h"
#include "debug.h"
#include "gfortran.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "constructor.h"
#include "trans.h"
#include "trans-types.h"
static GTY(()) tree saved_function_decls;
static GTY(()) tree saved_parent_function_decls;
-static struct pointer_set_t *nonlocal_dummy_decl_pset;
+static hash_set<tree> *nonlocal_dummy_decl_pset;
static GTY(()) tree nonlocal_dummy_decls;
/* Holds the variable DECLs that are locals. */
tree decl, dummy;
if (! nonlocal_dummy_decl_pset)
- nonlocal_dummy_decl_pset = pointer_set_create ();
+ nonlocal_dummy_decl_pset = new hash_set<tree>;
- if (pointer_set_insert (nonlocal_dummy_decl_pset, sym->backend_decl))
+ if (nonlocal_dummy_decl_pset->add (sym->backend_decl))
return;
dummy = GFC_DECL_SAVED_DESCRIPTOR (sym->backend_decl);
{
BLOCK_VARS (DECL_INITIAL (fndecl))
= chainon (BLOCK_VARS (DECL_INITIAL (fndecl)), nonlocal_dummy_decls);
- pointer_set_destroy (nonlocal_dummy_decl_pset);
+ delete nonlocal_dummy_decl_pset;
nonlocal_dummy_decls = NULL;
nonlocal_dummy_decl_pset = NULL;
}
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
struct walk_stmt_info *wi)
{
- struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
+ hash_set<tree> *pset = (wi) ? wi->pset : NULL;
unsigned i;
tree ret = NULL_TREE;
/* Pointer set used to mark visited tree nodes when calling
walk_tree on each operand. If set to NULL, duplicate tree nodes
will be visited more than once. */
- struct pointer_set_t *pset;
+ hash_set<tree> *pset;
/* Operand returned by the callbacks. This is set when calling
walk_gimple_seq. If the walk_stmt_fn or walk_tree_fn callback
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "basic-block.h"
{
struct gimplify_omp_ctx *outer_context;
splay_tree variables;
- struct pointer_set_t *privatized_types;
+ hash_set<tree> *privatized_types;
location_t location;
enum omp_clause_default_kind default_kind;
enum omp_region_type region_type;
c = XCNEW (struct gimplify_omp_ctx);
c->outer_context = gimplify_omp_ctxp;
c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
- c->privatized_types = pointer_set_create ();
+ c->privatized_types = new hash_set<tree>;
c->location = input_location;
c->region_type = region_type;
if ((region_type & ORT_TASK) == 0)
delete_omp_context (struct gimplify_omp_ctx *c)
{
splay_tree_delete (c->variables);
- pointer_set_destroy (c->privatized_types);
+ delete c->privatized_types;
XDELETE (c);
}
copy their subtrees if we can make sure to do it only once. */
if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
{
- if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
+ if (data && !((hash_set<tree> *)data)->add (t))
;
else
*walk_subtrees = 0;
struct cgraph_node *cgn = cgraph_node::get (fndecl);
/* If the language requires deep unsharing, we need a pointer set to make
sure we don't repeatedly unshare subtrees of unshareable nodes. */
- struct pointer_set_t *visited
- = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
+ hash_set<tree> *visited
+ = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
- if (visited)
- pointer_set_destroy (visited);
+ delete visited;
if (cgn)
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
}
/* Nonlocal VLAs seen in the current function. */
-static struct pointer_set_t *nonlocal_vlas;
+static hash_set<tree> *nonlocal_vlas;
/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
static tree nonlocal_vla_vars;
&& (ctx->region_type == ORT_WORKSHARE
|| ctx->region_type == ORT_SIMD))
ctx = ctx->outer_context;
- if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
+ if (!ctx && !nonlocal_vlas->add (decl))
{
tree copy = copy_node (decl);
return;
type = TYPE_MAIN_VARIANT (type);
- if (pointer_set_insert (ctx->privatized_types, type))
+ if (ctx->privatized_types->add (type))
return;
switch (TREE_CODE (type))
cgn = cgraph_node::get (fndecl);
if (cgn && cgn->origin)
- nonlocal_vlas = pointer_set_create ();
+ nonlocal_vlas = new hash_set<tree>;
/* Make sure input_location isn't set to something weird. */
input_location = DECL_SOURCE_LOCATION (fndecl);
nonlocal_vla_vars);
nonlocal_vla_vars = NULL_TREE;
}
- pointer_set_destroy (nonlocal_vlas);
+ delete nonlocal_vlas;
nonlocal_vlas = NULL;
}
#include "diagnostic-core.h"
#include "tree.h"
#include "ggc.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "obstack.h"
#include "debug.h"
#include "wide-int-print.h"
struct godump_container
{
/* DECLs that we have already seen. */
- struct pointer_set_t *decls_seen;
+ hash_set<tree> decls_seen;
/* Types which may potentially have to be defined as dummy
types. */
- struct pointer_set_t *pot_dummy_types;
+ hash_set<const char *> pot_dummy_types;
/* Go keywords. */
htab_t keyword_hash;
ob = &container->type_obstack;
if (TYPE_NAME (type) != NULL_TREE
- && (pointer_set_contains (container->decls_seen, type)
- || pointer_set_contains (container->decls_seen, TYPE_NAME (type)))
+ && (container->decls_seen.contains (type)
+ || container->decls_seen.contains (TYPE_NAME (type)))
&& (AGGREGATE_TYPE_P (type)
|| POINTER_TYPE_P (type)
|| TREE_CODE (type) == FUNCTION_TYPE))
return ret;
}
- pointer_set_insert (container->decls_seen, type);
+ container->decls_seen.add (type);
switch (TREE_CODE (type))
{
definition. So this struct or union is a potential dummy
type. */
if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
- pointer_set_insert (container->pot_dummy_types,
- IDENTIFIER_POINTER (name));
+ container->pot_dummy_types.add (IDENTIFIER_POINTER (name));
return ret;
}
separately. */
if (TREE_CODE (TREE_TYPE (decl)) == ENUMERAL_TYPE
&& TYPE_SIZE (TREE_TYPE (decl)) != 0
- && !pointer_set_contains (container->decls_seen, TREE_TYPE (decl))
+ && !container->decls_seen.contains (TREE_TYPE (decl))
&& (TYPE_CANONICAL (TREE_TYPE (decl)) == NULL_TREE
- || !pointer_set_contains (container->decls_seen,
- TYPE_CANONICAL (TREE_TYPE (decl)))))
+ || !container->decls_seen.contains
+ (TYPE_CANONICAL (TREE_TYPE (decl)))))
{
tree element;
mhval->value = xstrdup (buf);
*slot = mhval;
}
- pointer_set_insert (container->decls_seen, TREE_TYPE (decl));
+ container->decls_seen.add (TREE_TYPE (decl));
if (TYPE_CANONICAL (TREE_TYPE (decl)) != NULL_TREE)
- pointer_set_insert (container->decls_seen,
- TYPE_CANONICAL (TREE_TYPE (decl)));
+ container->decls_seen.add (TYPE_CANONICAL (TREE_TYPE (decl)));
}
if (DECL_NAME (decl) != NULL_TREE)
size);
}
- pointer_set_insert (container->decls_seen, decl);
+ container->decls_seen.add (decl);
}
else if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
{
{
bool is_valid;
- if (pointer_set_contains (container->decls_seen, decl)
- || pointer_set_contains (container->decls_seen, DECL_NAME (decl)))
+ if (container->decls_seen.contains (decl)
+ || container->decls_seen.contains (DECL_NAME (decl)))
return;
- pointer_set_insert (container->decls_seen, decl);
- pointer_set_insert (container->decls_seen, DECL_NAME (decl));
+ container->decls_seen.add (decl);
+ container->decls_seen.add (DECL_NAME (decl));
is_valid = go_format_type (container, TREE_TYPE (decl), true, false);
if (is_valid
{
tree type_name = TYPE_NAME (TREE_TYPE (decl));
if (TREE_CODE (type_name) == IDENTIFIER_NODE)
- pointer_set_insert (container->pot_dummy_types,
- IDENTIFIER_POINTER (type_name));
+ container->pot_dummy_types.add (IDENTIFIER_POINTER (type_name));
else if (TREE_CODE (type_name) == TYPE_DECL)
- pointer_set_insert (container->pot_dummy_types,
- IDENTIFIER_POINTER (DECL_NAME (type_name)));
+ container->pot_dummy_types.add
+ (IDENTIFIER_POINTER (DECL_NAME (type_name)));
}
}
/* Traversing the pot_dummy_types and seeing which types are present
in the global types hash table and creating dummy definitions if
- not found. This function is invoked by pointer_set_traverse. */
+ not found. This function is invoked by hash_set::traverse. */
-static bool
-find_dummy_types (const void *ptr, void *adata)
+bool
+find_dummy_types (const char *const &ptr, godump_container *adata)
{
struct godump_container *data = (struct godump_container *) adata;
const char *type = (const char *) ptr;
real_debug_hooks->finish (filename);
- container.decls_seen = pointer_set_create ();
- container.pot_dummy_types = pointer_set_create ();
container.type_hash = htab_create (100, htab_hash_string,
string_hash_eq, NULL);
container.invalid_hash = htab_create (10, htab_hash_string,
htab_traverse_noresize (macro_hash, go_print_macro, NULL);
/* To emit dummy definitions. */
- pointer_set_traverse (container.pot_dummy_types, find_dummy_types,
- (void *) &container);
+ container.pot_dummy_types.traverse<godump_container *, find_dummy_types>
+ (&container);
- pointer_set_destroy (container.decls_seen);
- pointer_set_destroy (container.pot_dummy_types);
htab_delete (container.type_hash);
htab_delete (container.invalid_hash);
htab_delete (container.keyword_hash);
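The traverse call above replaces pointer_set_traverse. The callback is now a template argument rather than a function pointer parameter, so it receives the key by const reference and the user argument with its real type instead of void *, and it needs external linkage, which is presumably why find_dummy_types loses its static. A small self-contained sketch of the calling convention, with illustrative names:

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "hash-set.h"

    /* Illustrative callback: count the keys visited.  Must return bool and
       take (const Key &, Arg) to match hash_set::traverse.  */

    bool
    count_key (const char *const &key, int *counter)
    {
      if (key)
        ++*counter;
      return true;
    }

    void
    count_keys (hash_set<const char *> &set)
    {
      int n = 0;
      set.traverse<int *, count_key> (&n);
    }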
--- /dev/null
+/* A type-safe hash set.
+ Copyright (C) 2014 Free Software Foundation, Inc.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
+
+
+#ifndef hash_set_h
+#define hash_set_h
+
+#include "hash-table.h"
+
+/* Implement default behavior for traits when types allow it. */
+
+struct default_hashset_traits
+{
+ /* Hashes the passed in key. */
+
+ template<typename T>
+ static hashval_t
+ hash (T *p)
+ {
+ return uintptr_t(p) >> 3;
+ }
+
+ template<typename T> static hashval_t hash (const T &v) { return v; }
+
+ /* Return true if the two keys passed as arguments are equal. */
+
+ template<typename T>
+ static bool
+ equal (const T &a, const T &b)
+ {
+ return a == b;
+ }
+
+ /* Called to dispose of the key before marking the entry as deleted. */
+
+ template<typename T> static void remove (T &v) { v.~T (); }
+
+ /* Mark the passed in entry as being deleted. */
+
+ template<typename T>
+ static void
+ mark_deleted (T *&e)
+ {
+ e = reinterpret_cast<T *> (1);
+ }
+
+ /* Mark the passed in entry as being empty. */
+
+ template<typename T>
+ static void
+ mark_empty (T *&e)
+ {
+ e = NULL;
+ }
+
+ /* Return true if the passed in entry is marked as deleted. */
+
+ template<typename T>
+ static bool
+ is_deleted (T *e)
+ {
+ return e == reinterpret_cast<void *> (1);
+ }
+
+ /* Return true if the passed in entry is marked as empty. */
+
+ template<typename T> static bool is_empty (T *e) { return e == NULL; }
+};
+
+template<typename Key, typename Traits = default_hashset_traits>
+class hash_set
+{
+ struct hash_entry
+ {
+ Key m_key;
+
+ typedef hash_entry value_type;
+ typedef Key compare_type;
+ typedef int store_values_directly;
+
+ static hashval_t hash (const hash_entry &e)
+ {
+ return Traits::hash (e.m_key);
+ }
+
+ static bool equal (const hash_entry &a, const Key &b)
+ {
+ return Traits::equal (a.m_key, b);
+ }
+
+ static void remove (hash_entry &e) { Traits::remove (e.m_key); }
+
+ static void
+ mark_deleted (hash_entry &e)
+ {
+ Traits::mark_deleted (e.m_key);
+ }
+
+ static bool is_deleted (const hash_entry &e)
+ {
+ return Traits::is_deleted (e.m_key);
+ }
+
+ static void
+ mark_empty (hash_entry &e)
+ {
+ Traits::mark_empty (e.m_key);
+ }
+
+ static bool
+ is_empty (const hash_entry &e)
+ {
+ return Traits::is_empty (e.m_key);
+ }
+ };
+
+public:
+ explicit hash_set (size_t n = 13) : m_table (n) {}
+
+ /* If key k isn't already in the set add it, and
+ return false. Otherwise return true. */
+
+ bool add (const Key &k)
+ {
+ hash_entry *e = m_table.find_slot_with_hash (k, Traits::hash (k),
+ INSERT);
+ bool existed = !hash_entry::is_empty (*e);
+ if (!existed)
+ e->m_key = k;
+
+ return existed;
+ }
+
+ /* Return true if the passed in key is in the set. */
+
+ bool contains (const Key &k)
+ {
+ hash_entry &e = m_table.find_with_hash (k, Traits::hash (k));
+ return !Traits::is_empty (e.m_key);
+ }
+
+ /* Call the callback on each key in the set with the passed in arg. */
+
+ template<typename Arg, bool (*f)(const Key &, Arg)>
+ void traverse (Arg a) const
+ {
+ for (typename hash_table<hash_entry>::iterator iter = m_table.begin ();
+ iter != m_table.end (); ++iter)
+ f ((*iter).m_key, a);
+ }
+
+private:
+ hash_table<hash_entry> m_table;
+};
+
+#endif
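Taken together, contains and add cover everything the pointer_set call sites above needed: contains is a pure lookup, add is insert-and-test. A minimal usage sketch against the header above (record_tree and its names are illustrative, not code from this patch):

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "hash-set.h"

    /* Illustrative: record T in SEEN and report whether it was new.  */

    static bool
    record_tree (hash_set<tree> &seen, tree t)
    {
      if (seen.contains (t))    /* was pointer_set_contains  */
        return false;
      seen.add (t);             /* was pointer_set_insert  */
      return true;
    }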
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "target.h"
#include "hash-table.h"
#include "inchash.h"
#include "stor-layout.h"
#include "intl.h"
-static bool odr_types_equivalent_p (tree, tree, bool, bool *, pointer_set_t *);
+static bool odr_types_equivalent_p (tree, tree, bool, bool *,
+ hash_set<tree> *);
static bool odr_violation_reported = false;
= {0, 0, NULL, NULL, false, true, true};
/* Pointer set of all call targets appearing in the cache. */
-static pointer_set_t *cached_polymorphic_call_targets;
+static hash_set<cgraph_node *> *cached_polymorphic_call_targets;
/* The node of type inheritance graph. For each type unique in
One Definition Rule (ODR) sense, we produce one node linking all
/* All equivalent types, if more than one. */
vec<tree, va_gc> *types;
/* Set of all equivalent types, if NON-NULL. */
- pointer_set_t * GTY((skip)) types_set;
+ hash_set<tree> * GTY((skip)) types_set;
/* Unique ID indexing the type in odr_types array. */
int id;
v->bases.release ();
v->derived_types.release ();
if (v->types_set)
- pointer_set_destroy (v->types_set);
+ delete v->types_set;
ggc_free (v);
}
/* Compare T1 and T2 based on name or structure. */
static bool
-odr_subtypes_equivalent_p (tree t1, tree t2, pointer_set_t *visited)
+odr_subtypes_equivalent_p (tree t1, tree t2, hash_set<tree> *visited)
{
bool an1, an2;
/* This should really be a pair hash, but for the moment we do not need
100% reliability and it would be better to compare all ODR types so
recursion here is needed only for component types. */
- if (pointer_set_insert (visited, t1))
+ if (visited->add (t1))
return true;
return odr_types_equivalent_p (t1, t2, false, NULL, visited);
}
gimple_canonical_types_compatible_p. */
static bool
-odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, pointer_set_t *visited)
+odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned, hash_set<tree> *visited)
{
/* Check first for the obvious case of pointer identity. */
if (t1 == t2)
{
bool build_bases = false;
if (!val->types_set)
- val->types_set = pointer_set_create ();
+ val->types_set = new hash_set<tree>;
/* Always prefer complete type to be the leader. */
if (!COMPLETE_TYPE_P (val->type)
}
/* See if this duplicate is new. */
- if (!pointer_set_insert (val->types_set, type))
+ if (!val->types_set->add (type))
{
bool merge = true;
bool base_mismatch = false;
unsigned int i,j;
bool warned = false;
- pointer_set_t *visited = pointer_set_create ();
+ hash_set<tree> visited;
gcc_assert (in_lto_p);
vec_safe_push (val->types, type);
/* First we compare memory layout. */
if (!odr_types_equivalent_p (val->type, type, !flag_ltrans && !val->odr_violated,
- &warned, visited))
+ &warned, &visited))
{
merge = false;
odr_violation_reported = true;
putc ('\n',cgraph_dump_file);
}
}
- pointer_set_destroy (visited);
/* Next sanity check that bases are the same. If not, we will end
up producing wrong answers. */
static void
maybe_record_node (vec <cgraph_node *> &nodes,
- tree target, pointer_set_t *inserted,
+ tree target, hash_set<tree> *inserted,
bool can_refer,
bool *completep)
{
{
gcc_assert (!target_node->global.inlined_to);
gcc_assert (target_node->real_symbol_p ());
- if (!pointer_set_insert (inserted, target_node->decl))
+ if (!inserted->add (target))
{
- pointer_set_insert (cached_polymorphic_call_targets,
- target_node);
+ cached_polymorphic_call_targets->add (target_node);
nodes.safe_push (target_node);
}
}
HOST_WIDE_INT otr_token,
tree outer_type,
HOST_WIDE_INT offset,
- pointer_set_t *inserted,
- pointer_set_t *matched_vtables,
+ hash_set<tree> *inserted,
+ hash_set<tree> *matched_vtables,
bool anonymous,
bool *completep)
{
}
gcc_assert (inner_binfo);
if (bases_to_consider
- ? !pointer_set_contains (matched_vtables, BINFO_VTABLE (inner_binfo))
- : !pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
+ ? !matched_vtables->contains (BINFO_VTABLE (inner_binfo))
+ : !matched_vtables->add (BINFO_VTABLE (inner_binfo)))
{
bool can_refer;
tree target = gimple_get_virt_method_for_binfo (otr_token,
static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
- pointer_set_t *inserted,
- pointer_set_t *matched_vtables,
+ hash_set<tree> *inserted,
+ hash_set<tree> *matched_vtables,
tree otr_type,
odr_type type,
HOST_WIDE_INT otr_token,
{
delete polymorphic_call_target_hash;
polymorphic_call_target_hash = NULL;
- pointer_set_destroy (cached_polymorphic_call_targets);
+ delete cached_polymorphic_call_targets;
cached_polymorphic_call_targets = NULL;
}
}
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
if (cached_polymorphic_call_targets
- && pointer_set_contains (cached_polymorphic_call_targets, n))
+ && cached_polymorphic_call_targets->contains (n))
free_polymorphic_call_targets_hash ();
}
tree outer_type,
HOST_WIDE_INT offset,
vec <cgraph_node *> &nodes,
- pointer_set_t *inserted,
- pointer_set_t *matched_vtables,
+ hash_set<tree> *inserted,
+ hash_set<tree> *matched_vtables,
bool *completep)
{
while (true)
return;
}
gcc_assert (base_binfo);
- if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
+ if (!matched_vtables->add (BINFO_VTABLE (base_binfo)))
{
bool can_refer;
tree target = gimple_get_virt_method_for_binfo (otr_token,
&can_refer);
if (!target || ! DECL_CXX_DESTRUCTOR_P (target))
maybe_record_node (nodes, target, inserted, can_refer, completep);
- pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
+ matched_vtables->add (BINFO_VTABLE (base_binfo));
}
}
}
int *speculative_targetsp)
{
static struct cgraph_node_hook_list *node_removal_hook_holder;
- pointer_set_t *inserted;
- pointer_set_t *matched_vtables;
vec <cgraph_node *> nodes = vNULL;
vec <tree> bases_to_consider = vNULL;
odr_type type, outer_type;
/* Initialize query cache. */
if (!cached_polymorphic_call_targets)
{
- cached_polymorphic_call_targets = pointer_set_create ();
+ cached_polymorphic_call_targets = new hash_set<cgraph_node *>;
polymorphic_call_target_hash
= new polymorphic_call_target_hash_type (23);
if (!node_removal_hook_holder)
(*slot)->context = context;
(*slot)->speculative_targets = 0;
- inserted = pointer_set_create ();
- matched_vtables = pointer_set_create ();
+ hash_set<tree> inserted;
+ hash_set<tree> matched_vtables;
if (context.speculative_outer_type)
{
context.speculative_maybe_derived_type = false;
}
if (type_possibly_instantiated_p (speculative_outer_type->type))
- maybe_record_node (nodes, target, inserted, can_refer, &complete);
+ maybe_record_node (nodes, target, &inserted, can_refer, &complete);
if (binfo)
- pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
+ matched_vtables.add (BINFO_VTABLE (binfo));
/* Next walk recursively all derived types. */
if (context.speculative_maybe_derived_type)
{
if (!type->all_derivations_known)
complete = false;
for (i = 0; i < speculative_outer_type->derived_types.length(); i++)
- possible_polymorphic_call_targets_1 (nodes, inserted,
- matched_vtables,
+ possible_polymorphic_call_targets_1 (nodes, &inserted,
+ &matched_vtables,
otr_type,
speculative_outer_type->derived_types[i],
otr_token, speculative_outer_type->type,
/* If OUTER_TYPE is abstract, we know we are not seeing its instance. */
if (type_possibly_instantiated_p (outer_type->type))
- maybe_record_node (nodes, target, inserted, can_refer, &complete);
+ maybe_record_node (nodes, target, &inserted, can_refer, &complete);
else
{
skipped = true;
}
if (binfo)
- pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
+ matched_vtables.add (BINFO_VTABLE (binfo));
/* Next walk recursively all derived types. */
if (context.maybe_derived_type)
if (!type->all_derivations_known)
complete = false;
for (i = 0; i < outer_type->derived_types.length(); i++)
- possible_polymorphic_call_targets_1 (nodes, inserted,
- matched_vtables,
+ possible_polymorphic_call_targets_1 (nodes, &inserted,
+ &matched_vtables,
otr_type,
outer_type->derived_types[i],
otr_token, outer_type->type,
|| (context.maybe_derived_type
&& !type_all_derivations_known_p (outer_type->type))))
record_targets_from_bases (otr_type, otr_token, outer_type->type,
- context.offset, nodes, inserted,
- matched_vtables, &complete);
+ context.offset, nodes, &inserted,
+ &matched_vtables, &complete);
if (skipped)
- maybe_record_node (nodes, target, inserted, can_refer, &complete);
+ maybe_record_node (nodes, target, &inserted, can_refer, &complete);
for (i = 0; i < bases_to_consider.length(); i++)
- maybe_record_node (nodes, bases_to_consider[i], inserted, can_refer, &complete);
+ maybe_record_node (nodes, bases_to_consider[i], &inserted, can_refer, &complete);
}
bases_to_consider.release();
if (speculative_targetsp)
*speculative_targetsp = (*slot)->speculative_targets;
- pointer_set_destroy (inserted);
- pointer_set_destroy (matched_vtables);
timevar_pop (TV_IPA_VIRTUAL_CALL);
return nodes;
}
ipa_devirt (void)
{
struct cgraph_node *n;
- struct pointer_set_t *bad_call_targets = pointer_set_create ();
+ hash_set<void *> bad_call_targets;
struct cgraph_edge *e;
int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
if (!dump_file)
continue;
}
- if (pointer_set_contains (bad_call_targets,
- cache_token))
+ if (bad_call_targets.contains (cache_token))
{
if (dump_file)
fprintf (dump_file, "Target list is known to be useless\n\n");
}
if (!likely_target)
{
- pointer_set_insert (bad_call_targets, cache_token);
+ bad_call_targets.add (cache_token);
continue;
}
/* This is reached only when dumping; check if we agree or disagree
if (update)
inline_update_overall_summary (n);
}
- pointer_set_destroy (bad_call_targets);
if (dump_file)
fprintf (dump_file,
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
-
-static struct pointer_set_t *visited_nodes;
+#include "hash-set.h"
/* Lattice values for const and pure functions. Everything starts out
being const, then may drop to pure and then neither depending on
/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
is true if the function is known to be finite. The diagnostic is
- controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
+ controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
OPTION, this function may initialize it and it is always returned
by the function. */
-static struct pointer_set_t *
+static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
- struct pointer_set_t *warned_about,
+ hash_set<tree> *warned_about,
const char * attrib_name)
{
if (!option_enabled (option, &global_options))
return warned_about;
if (!warned_about)
- warned_about = pointer_set_create ();
- if (pointer_set_contains (warned_about, decl))
+ warned_about = new hash_set<tree>;
+ if (warned_about->contains (decl))
return warned_about;
- pointer_set_insert (warned_about, decl);
+ warned_about->add (decl);
warning_at (DECL_SOURCE_LOCATION (decl),
option,
known_finite
static void
warn_function_pure (tree decl, bool known_finite)
{
- static struct pointer_set_t *warned_about;
+ static hash_set<tree> *warned_about;
warned_about
= suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
static void
warn_function_const (tree decl, bool known_finite)
{
- static struct pointer_set_t *warned_about;
+ static hash_set<tree> *warned_about;
warned_about
= suggest_attribute (OPT_Wsuggest_attribute_const, decl,
known_finite, warned_about, "const");
static void
warn_function_noreturn (tree decl)
{
- static struct pointer_set_t *warned_about;
+ static hash_set<tree> *warned_about;
if (!lang_hooks.missing_noreturn_ok_p (decl)
&& targetm.warn_func_return (decl))
warned_about
static declarations. We do not need to scan them more than once
since all we would be interested in are the addressof
operations. */
- visited_nodes = pointer_set_create ();
if (node->get_availability () > AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
- pointer_set_destroy (visited_nodes);
- visited_nodes = NULL;
}
/* Called when new clone is inserted to callgraph late. */
register_hooks ();
- /* There are some shared nodes, in particular the initializers on
- static declarations. We do not need to scan them more than once
- since all we would be interested in are the addressof
- operations. */
- visited_nodes = pointer_set_create ();
-
/* Process all of the functions.
We process AVAIL_INTERPOSABLE functions. We can not use the results
FOR_EACH_DEFINED_FUNCTION (node)
if (node->get_availability () >= AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
-
- pointer_set_destroy (visited_nodes);
- visited_nodes = NULL;
}
}
if (found)
{
- struct pointer_set_t *visited_nodes = pointer_set_create ();
+ hash_set<tree> visited_nodes;
vnode->get_constructor ();
walk_tree (&DECL_INITIAL (vnode->decl),
- update_vtable_references, NULL, visited_nodes);
- pointer_set_destroy (visited_nodes);
+ update_vtable_references, NULL, &visited_nodes);
vnode->remove_all_references ();
record_references_in_initializer (vnode->decl, false);
}
#include "cgraph.h"
#include "tree-pass.h"
#include "hash-map.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
static void
enqueue_node (symtab_node *node, symtab_node **first,
- struct pointer_set_t *reachable)
+ hash_set<symtab_node *> *reachable)
{
/* Node is still in queue; do nothing. */
if (node->aux && node->aux != (void *) 2)
return;
/* Node was already processed as unreachable, re-enqueue
only if it became reachable now. */
- if (node->aux == (void *)2 && !pointer_set_contains (reachable, node))
+ if (node->aux == (void *)2 && !reachable->contains (node))
return;
node->aux = *first;
*first = node;
process_references (symtab_node *snode,
symtab_node **first,
bool before_inlining_p,
- struct pointer_set_t *reachable)
+ hash_set<symtab_node *> *reachable)
{
int i;
struct ipa_ref *ref = NULL;
&& flag_wpa
&& ctor_for_folding (node->decl)
!= error_mark_node))))
- pointer_set_insert (reachable, node);
+ reachable->add (node);
enqueue_node (node, first, reachable);
}
}
possible. */
static void
-walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
+walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
struct cgraph_edge *edge,
symtab_node **first,
- pointer_set_t *reachable, bool before_inlining_p)
+ hash_set<symtab_node *> *reachable,
+ bool before_inlining_p)
{
unsigned int i;
void *cache_token;
= possible_polymorphic_call_targets
(edge, &final, &cache_token);
- if (!pointer_set_insert (reachable_call_targets,
- cache_token))
+ if (!reachable_call_targets->add (cache_token))
{
for (i = 0; i < targets.length (); i++)
{
&& (cgraph_state < CGRAPH_STATE_IPA_SSA
|| !lookup_attribute ("always_inline",
DECL_ATTRIBUTES (n->decl)))))
- pointer_set_insert (reachable, n);
+ reachable->add (n);
/* Even after inlining we want to keep the possible targets in the
boundary, so late passes can still produce direct call even if
struct cgraph_node *node, *next;
varpool_node *vnode, *vnext;
bool changed = false;
- struct pointer_set_t *reachable = pointer_set_create ();
- struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();
- struct pointer_set_t *reachable_call_targets = pointer_set_create ();
+ hash_set<symtab_node *> reachable;
+ hash_set<tree> body_needed_for_clonning;
+ hash_set<void *> reachable_call_targets;
timevar_push (TV_IPA_UNREACHABLE);
if (optimize && flag_devirtualize)
&& !node->can_remove_if_no_direct_calls_and_refs_p ())
{
gcc_assert (!node->global.inlined_to);
- pointer_set_insert (reachable, node);
- enqueue_node (node, &first, reachable);
+ reachable.add (node);
+ enqueue_node (node, &first, &reachable);
}
else
gcc_assert (!node->aux);
if (!vnode->can_remove_if_no_refs_p()
&& !vnode->in_other_partition)
{
- pointer_set_insert (reachable, vnode);
- enqueue_node (vnode, &first, reachable);
+ reachable.add (vnode);
+ enqueue_node (vnode, &first, &reachable);
}
/* Perform reachability analysis. */
while (first != (symtab_node *) (void *) 1)
{
- bool in_boundary_p = !pointer_set_contains (reachable, first);
+ bool in_boundary_p = !reachable.contains (first);
symtab_node *node = first;
first = (symtab_node *)first->aux;
struct cgraph_node *origin_node
= cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
origin_node->used_as_abstract_origin = true;
- enqueue_node (origin_node, &first, reachable);
+ enqueue_node (origin_node, &first, &reachable);
}
/* If any symbol in a comdat group is reachable, force
all externally visible symbols in the same comdat
next != node;
next = next->same_comdat_group)
if (!next->comdat_local_p ()
- && !pointer_set_insert (reachable, next))
- enqueue_node (next, &first, reachable);
+ && !reachable.add (next))
+ enqueue_node (next, &first, &reachable);
}
/* Mark references as reachable. */
- process_references (node, &first, before_inlining_p, reachable);
+ process_references (node, &first, before_inlining_p, &reachable);
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
{
next = e->next_callee;
if (e->indirect_info->polymorphic)
- walk_polymorphic_call_targets (reachable_call_targets,
- e, &first, reachable,
+ walk_polymorphic_call_targets (&reachable_call_targets,
+ e, &first, &reachable,
before_inlining_p);
}
}
if (DECL_EXTERNAL (e->callee->decl)
&& e->callee->alias
&& before_inlining_p)
- pointer_set_insert (reachable,
- e->callee->function_symbol ());
- pointer_set_insert (reachable, e->callee);
+ reachable.add (e->callee->function_symbol ());
+ reachable.add (e->callee);
}
- enqueue_node (e->callee, &first, reachable);
+ enqueue_node (e->callee, &first, &reachable);
}
/* When inline clone exists, mark body to be preserved so when removing
offline copy of the function we don't kill it. */
if (cnode->global.inlined_to)
- pointer_set_insert (body_needed_for_clonning, cnode->decl);
+ body_needed_for_clonning.add (cnode->decl);
/* For non-inline clones, force their origins to the boundary and ensure
that body is not removed. */
cnode = cnode->clone_of;
if (noninline)
{
- pointer_set_insert (body_needed_for_clonning, cnode->decl);
- enqueue_node (cnode, &first, reachable);
+ body_needed_for_clonning.add (cnode->decl);
+ enqueue_node (cnode, &first, &reachable);
}
}
next;
next = next->simdclone->next_clone)
if (in_boundary_p
- || !pointer_set_insert (reachable, next))
- enqueue_node (next, &first, reachable);
+ || !reachable.add (next))
+ enqueue_node (next, &first, &reachable);
}
}
/* When we see constructor of external variable, keep referred nodes in the
{
struct ipa_ref *ref = NULL;
for (int i = 0; node->iterate_reference (i, ref); i++)
- enqueue_node (ref->referred, &first, reachable);
+ enqueue_node (ref->referred, &first, &reachable);
}
}
changed = true;
}
/* If node is unreachable, remove its body. */
- else if (!pointer_set_contains (reachable, node))
+ else if (!reachable.contains (node))
{
- if (!pointer_set_contains (body_needed_for_clonning, node->decl))
+ if (!body_needed_for_clonning.contains (node->decl))
node->release_body ();
else if (!node->clone_of)
gcc_assert (in_lto_p || DECL_RESULT (node->decl));
vnode->remove ();
changed = true;
}
- else if (!pointer_set_contains (reachable, vnode))
+ else if (!reachable.contains (vnode))
{
tree init;
if (vnode->definition)
vnode->aux = NULL;
}
- pointer_set_destroy (reachable);
- pointer_set_destroy (body_needed_for_clonning);
- pointer_set_destroy (reachable_call_targets);
-
/* Now update address_taken flags and try to promote functions to be local. */
if (file)
fprintf (file, "\nClearing address taken flags:");
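The ipa.c hunks above rely on two hash_set members: add (), which returns true when the key was already in the set (the same contract pointer_set_insert had), and contains (), a pure membership test. A minimal sketch of that idiom, assuming only GCC's hash-set.h and the usual include prologue; node_t and the enqueue callback are placeholders, not the real symtab types:

/* Sketch only: the add ()/contains () idiom used by the reachability
   walk above.  Assumes the usual GCC include prologue plus hash-set.h;
   NODE_T and ENQUEUE are placeholders.  */
#include "hash-set.h"

typedef void *node_t;

static void
mark_reachable (hash_set<node_t> &reachable, node_t node,
                void (*enqueue) (node_t))
{
  /* add () returns true when NODE was already present, so a false
     result means this is the first visit and NODE still needs to be
     queued, exactly as with pointer_set_insert ().  */
  if (!reachable.add (node))
    enqueue (node);
}

static bool
in_boundary_p (hash_set<node_t> &reachable, node_t node)
{
  /* contains () only tests membership, like pointer_set_contains ().  */
  return !reachable.contains (node);
}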
#include "params.h"
#include "input.h"
#include "hashtab.h"
+#include "hash-set.h"
#include "langhooks.h"
#include "bitmap.h"
#include "function.h"
int i;
lto_symtab_encoder_t encoder;
lto_symtab_encoder_iterator lsei;
- struct pointer_set_t *reachable_call_targets = pointer_set_create ();
+ hash_set<void *> reachable_call_targets;
encoder = lto_symtab_encoder_new (false);
vec <cgraph_node *>targets
= possible_polymorphic_call_targets
(edge, &final, &cache_token);
- if (!pointer_set_insert (reachable_call_targets,
- cache_token))
+ if (!reachable_call_targets.add (cache_token))
{
for (i = 0; i < targets.length (); i++)
{
}
}
lto_symtab_encoder_delete (in_encoder);
- pointer_set_destroy (reachable_call_targets);
return encoder;
}
#include "params.h"
#include "input.h"
#include "hashtab.h"
+#include "hash-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
static void
write_symbol (struct streamer_tree_cache_d *cache,
- tree t, struct pointer_set_t *seen, bool alias)
+ tree t, hash_set<const char *> *seen, bool alias)
{
const char *name;
enum gcc_plugin_symbol_kind kind;
same name manipulations that ASM_OUTPUT_LABELREF does. */
name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
- if (pointer_set_contains (seen, name))
+ if (seen->add (name))
return;
- pointer_set_insert (seen, name);
streamer_tree_cache_lookup (cache, t, &slot_num);
gcc_assert (slot_num != (unsigned)-1);
{
struct streamer_tree_cache_d *cache = ob->writer_cache;
char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
- struct pointer_set_t *seen;
lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
lto_symtab_encoder_iterator lsei;
lto_begin_section (section_name, false);
free (section_name);
- seen = pointer_set_create ();
+ hash_set<const char *> seen;
/* Write the symbol table.
First write everything defined and then all declarations.
if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
continue;
- write_symbol (cache, node->decl, seen, false);
+ write_symbol (cache, node->decl, &seen, false);
}
for (lsei = lsei_start (encoder);
!lsei_end_p (lsei); lsei_next (&lsei))
if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
continue;
- write_symbol (cache, node->decl, seen, false);
+ write_symbol (cache, node->decl, &seen, false);
}
- pointer_set_destroy (seen);
-
lto_end_section ();
}
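In write_symbol above, the old contains-then-insert pair collapses into a single add () call, since add () already reports prior membership. As with pointer_set, the set hashes the pointer value itself, so the de-duplication still depends on the names being interned identifiers. A minimal sketch, assuming hash-set.h; emit_once and its body are illustrative only:

/* Sketch: one-call de-duplication with hash_set::add ().  Keys are
   compared as pointers, as with the old pointer_set, so this only
   de-duplicates interned strings.  Assumes the usual GCC include
   prologue plus hash-set.h.  */
#include "hash-set.h"

static bool
emit_once (hash_set<const char *> &seen, const char *name)
{
  if (seen.add (name))
    return false;	/* Already emitted; nothing to do.  */
  /* ... write the symbol here ...  */
  return true;
}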
+2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
+
+ * lto-partition.c, lto-partition.h: Use hash_set instead of
+ pointer_set.
+
2014-07-31 Andi Kleen <ak@linux.intel.com>
* lto.c (hash_canonical_type): Use inchash::hash
for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
{
if (part->initializers_visited)
- pointer_set_destroy (part->initializers_visited);
+ delete part->initializers_visited;
/* Symtab encoder is freed after streaming. */
free (part);
}
&& !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
{
if (!part->initializers_visited)
- part->initializers_visited = pointer_set_create ();
- if (!pointer_set_insert (part->initializers_visited, ref->referred))
+ part->initializers_visited = new hash_set<symtab_node *>;
+ if (!part->initializers_visited->add (ref->referred))
add_references_to_partition (part, ref->referred);
}
}
/* After UNDO we no longer know what was visited. */
if (partition->initializers_visited)
- pointer_set_destroy (partition->initializers_visited);
+ delete partition->initializers_visited;
partition->initializers_visited = NULL;
if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
+#include "hash-set.h"
/* Structure describing ltrans partitions. */
lto_symtab_encoder_t encoder;
const char * name;
int insns;
- pointer_set_t *initializers_visited;
+ hash_set<symtab_node *> *initializers_visited;
};
typedef struct ltrans_partition_def *ltrans_partition;
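initializers_visited is now a hash_set the partition owns: allocated on first use, deleted and reset to NULL when the partition is undone or freed. A minimal sketch of that lazy-ownership pattern, assuming hash-set.h; part_stub and item are placeholders for the real ltrans_partition and symtab_node types:

/* Sketch of the lazily allocated, owned set used for
   initializers_visited above.  Assumes the usual GCC include prologue
   plus hash-set.h; ITEM stands in for symtab_node *.  */
#include "hash-set.h"

typedef void *item;

struct part_stub
{
  hash_set<item> *visited;	/* NULL until first needed.  */
};

static bool
visit_once (struct part_stub *p, item x)
{
  if (!p->visited)
    p->visited = new hash_set<item>;
  return !p->visited->add (x);	/* True only the first time X is seen.  */
}

static void
forget_visited (struct part_stub *p)
{
  /* Unlike pointer_set_destroy (), delete of a null pointer is a
     no-op, so no guard is strictly required here.  */
  delete p->visited;
  p->visited = NULL;
}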
#include "predict.h"
#include "optabs.h"
#include "target.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
how to expand this switch(). */
uniq = 0;
count = 0;
- struct pointer_set_t *seen_labels = pointer_set_create ();
+ hash_set<tree> seen_labels;
compute_cases_per_edge (stmt);
for (i = ncases - 1; i >= 1; --i)
/* If we have not seen this label yet, then increase the
number of unique case node targets seen. */
- if (!pointer_set_insert (seen_labels, lab))
+ if (!seen_labels.add (lab))
uniq++;
/* The bounds on the case range, LOW and HIGH, have to be converted
case_edge->probability / (intptr_t)(case_edge->aux),
case_node_pool);
}
- pointer_set_destroy (seen_labels);
reset_out_edges_aux (bb);
/* cleanup_tree_cfg removes all SWITCH_EXPR with a single
static tree
verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
{
- struct pointer_set_t *visited = (struct pointer_set_t *) data;
+ hash_set<void *> *visited = (hash_set<void *> *) data;
if (tree_node_can_be_shared (*tp))
{
return NULL;
}
- if (pointer_set_insert (visited, *tp))
+ if (visited->add (*tp))
return *tp;
return NULL;
verify_eh_throw_stmt_node (void **slot, void *data)
{
struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
- struct pointer_set_t *visited = (struct pointer_set_t *) data;
+ hash_set<void *> *visited = (hash_set<void *> *) data;
- if (!pointer_set_contains (visited, node->stmt))
+ if (!visited->contains (node->stmt))
{
error ("dead STMT in EH table");
debug_gimple_stmt (node->stmt);
/* Verify if the location LOCs block is in BLOCKS. */
static bool
-verify_location (pointer_set_t *blocks, location_t loc)
+verify_location (hash_set<tree> *blocks, location_t loc)
{
tree block = LOCATION_BLOCK (loc);
if (block != NULL_TREE
- && !pointer_set_contains (blocks, block))
+ && !blocks->contains (block))
{
error ("location references block not in block tree");
return true;
static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
- struct pointer_set_t *blocks = (struct pointer_set_t *) data;
+ hash_set<tree> *blocks = (hash_set<tree> *) data;
if (TREE_CODE (*tp) == VAR_DECL
&& DECL_HAS_DEBUG_EXPR_P (*tp))
/* Insert all subblocks of BLOCK into BLOCKS and recurse. */
static void
-collect_subblocks (pointer_set_t *blocks, tree block)
+collect_subblocks (hash_set<tree> *blocks, tree block)
{
tree t;
for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
{
- pointer_set_insert (blocks, t);
+ blocks->add (t);
collect_subblocks (blocks, t);
}
}
{
basic_block bb;
bool err = false;
- struct pointer_set_t *visited, *visited_stmts, *blocks;
timevar_push (TV_TREE_STMT_VERIFY);
- visited = pointer_set_create ();
- visited_stmts = pointer_set_create ();
+ hash_set<void *> visited;
+ hash_set<gimple> visited_stmts;
/* Collect all BLOCKs referenced by the BLOCK tree of FN. */
- blocks = pointer_set_create ();
+ hash_set<tree> blocks;
if (DECL_INITIAL (fn->decl))
{
- pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
- collect_subblocks (blocks, DECL_INITIAL (fn->decl));
+ blocks.add (DECL_INITIAL (fn->decl));
+ collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
}
FOR_EACH_BB_FN (bb, fn)
bool err2 = false;
unsigned i;
- pointer_set_insert (visited_stmts, phi);
+ visited_stmts.add (phi);
if (gimple_bb (phi) != bb)
{
{
tree arg = gimple_phi_arg_def (phi, i);
tree addr = walk_tree (&arg, verify_node_sharing_1,
- visited, NULL);
+ &visited, NULL);
if (addr)
{
error ("incorrect sharing of tree nodes");
error ("virtual PHI with argument locations");
err2 = true;
}
- addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
+ addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
if (addr)
{
debug_generic_expr (addr);
err2 = true;
}
- err2 |= verify_location (blocks, loc);
+ err2 |= verify_location (&blocks, loc);
}
if (err2)
tree addr;
int lp_nr;
- pointer_set_insert (visited_stmts, stmt);
+ visited_stmts.add (stmt);
if (gimple_bb (stmt) != bb)
{
}
err2 |= verify_gimple_stmt (stmt);
- err2 |= verify_location (blocks, gimple_location (stmt));
+ err2 |= verify_location (&blocks, gimple_location (stmt));
memset (&wi, 0, sizeof (wi));
- wi.info = (void *) visited;
+ wi.info = (void *) &visited;
addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
if (addr)
{
}
memset (&wi, 0, sizeof (wi));
- wi.info = (void *) blocks;
+ wi.info = (void *) &blocks;
addr = walk_gimple_op (stmt, verify_expr_location, &wi);
if (addr)
{
if (get_eh_throw_stmt_table (cfun))
htab_traverse (get_eh_throw_stmt_table (cfun),
verify_eh_throw_stmt_node,
- visited_stmts);
+ &visited_stmts);
if (err || eh_error_found)
internal_error ("verify_gimple failed");
- pointer_set_destroy (visited);
- pointer_set_destroy (visited_stmts);
- pointer_set_destroy (blocks);
verify_histograms ();
timevar_pop (TV_TREE_STMT_VERIFY);
}
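The tree-cfg.c hunks use a single stack-allocated hash_set<tree> as a populate-then-query table: collect_subblocks fills it once, while verify_location and verify_expr_location_1 only read it. A minimal sketch of that split, assuming the usual GCC include prologue plus tree.h and hash-set.h; the error reporting is elided:

/* Sketch: populate a block set once, then query it read-only, as the
   BLOCK verification above does.  Assumes tree.h and hash-set.h.  */
#include "hash-set.h"

static void
collect_blocks (hash_set<tree> *blocks, tree block)
{
  blocks->add (block);
  for (tree t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    collect_blocks (blocks, t);
}

static bool
block_known_p (hash_set<tree> *blocks, tree block)
{
  /* Readers never mutate the set; contains () is enough.  */
  return block == NULL_TREE || blocks->contains (block);
}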
#define GCC_TREE_CORE_H
#include "hashtab.h"
+#include "hash-set.h"
#include "machmode.h"
#include "input.h"
#include "statistics.h"
struct ptr_info_def;
struct range_info_def;
struct die_struct;
-struct pointer_set_t;
/*---------------------------------------------------------------------------
/* The type of a callback function that represents a custom walk_tree. */
typedef tree (*walk_tree_lh) (tree *, int *, tree (*) (tree *, int *, void *),
- void *, struct pointer_set_t*);
+ void *, hash_set<tree> *);
/*---------------------------------------------------------------------------
#include "flags.h"
#include "function.h"
#include "except.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
eh_catch c;
edge_iterator ei;
edge e;
- struct pointer_set_t *seen_values = pointer_set_create ();
+ hash_set<tree> seen_values;
/* Collect the labels for a switch. Zero the post_landing_pad
field because we'll no longer have anything keeping these labels
attached to the handler anymore, we remove
the corresponding edge and then we delete unreachable
blocks at the end of this pass. */
- if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
+ if (! seen_values.contains (TREE_VALUE (flt_node)))
{
tree t = build_case_label (TREE_VALUE (flt_node),
NULL, lab);
labels.safe_push (t);
- pointer_set_insert (seen_values, TREE_VALUE (flt_node));
+ seen_values.add (TREE_VALUE (flt_node));
have_label = true;
}
x = gimple_build_switch (filter, default_label, labels);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
}
- pointer_set_destroy (seen_values);
}
break;
expensive, copy_body can be told to watch for nontrivial
changes. */
if (id->statements_to_fold)
- pointer_set_insert (id->statements_to_fold, stmt);
+ id->statements_to_fold->add (stmt);
/* We're duplicating a CALL_EXPR. Find any corresponding
callgraph edges and update or duplicate them. */
{
struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
struct walk_stmt_info wi;
- struct pointer_set_t *visited_nodes;
basic_block bb;
bool forbidden_p = false;
/* Next, walk the statements of the function looking for
constructs we can't handle, or that are non-optimal for inlining. */
- visited_nodes = pointer_set_create ();
+ hash_set<tree> visited_nodes;
memset (&wi, 0, sizeof (wi));
wi.info = (void *) fndecl;
- wi.pset = visited_nodes;
+ wi.pset = &visited_nodes;
FOR_EACH_BB_FN (bb, fun)
{
break;
}
- pointer_set_destroy (visited_nodes);
return forbidden_p;
}
\f
in the STATEMENTS pointer set. */
static void
-fold_marked_statements (int first, struct pointer_set_t *statements)
+fold_marked_statements (int first, hash_set<gimple> *statements)
{
for (; first < n_basic_blocks_for_fn (cfun); first++)
if (BASIC_BLOCK_FOR_FN (cfun, first))
for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
!gsi_end_p (gsi);
gsi_next (&gsi))
- if (pointer_set_contains (statements, gsi_stmt (gsi)))
+ if (statements->contains (gsi_stmt (gsi)))
{
gimple old_stmt = gsi_stmt (gsi);
tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
id.transform_return_to_modify = true;
id.transform_parameter = true;
id.transform_lang_insert_block = NULL;
- id.statements_to_fold = pointer_set_create ();
+ id.statements_to_fold = new hash_set<gimple>;
push_gimplify_context ();
/* Fold queued statements. */
fold_marked_statements (last, id.statements_to_fold);
- pointer_set_destroy (id.statements_to_fold);
+ delete id.statements_to_fold;
gcc_assert (!id.debug_stmts.exists ());
{
copy_body_data id;
struct walk_stmt_info wi;
- struct pointer_set_t *visited;
gimple_seq copy;
/* There's nothing to do for NULL_TREE. */
/* Walk the tree once to find local labels. */
memset (&wi, 0, sizeof (wi));
- visited = pointer_set_create ();
+ hash_set<tree> visited;
wi.info = &id;
- wi.pset = visited;
+ wi.pset = &visited;
walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
- pointer_set_destroy (visited);
copy = gimple_seq_copy (seq);
memset (&id, 0, sizeof (id));
/* Generate a new name for the new version. */
- id.statements_to_fold = pointer_set_create ();
+ id.statements_to_fold = new hash_set<gimple>;
id.decl_map = pointer_map_create ();
id.debug_map = NULL;
free_dominance_info (CDI_POST_DOMINATORS);
fold_marked_statements (0, id.statements_to_fold);
- pointer_set_destroy (id.statements_to_fold);
+ delete id.statements_to_fold;
fold_cond_expr_cond ();
delete_unreachable_blocks_update_callgraph (&id);
if (id.dst_node->definition)
#ifndef GCC_TREE_INLINE_H
#define GCC_TREE_INLINE_H
+#include "hash-set.h"
+
struct cgraph_edge;
/* Indicate the desired behavior wrt call graph edges. We can either
void (*transform_lang_insert_block) (tree);
/* Statements that might be possibly folded. */
- struct pointer_set_t *statements_to_fold;
+ hash_set<gimple> *statements_to_fold;
/* Entry basic block to currently copied body. */
basic_block entry_bb;
struct pointer_map_t *field_map;
struct pointer_map_t *var_map;
- struct pointer_set_t *mem_refs;
+ hash_set<tree *> *mem_refs;
bitmap suppress_expansion;
tree context;
struct nesting_info *info = XCNEW (struct nesting_info);
info->field_map = pointer_map_create ();
info->var_map = pointer_map_create ();
- info->mem_refs = pointer_set_create ();
+ info->mem_refs = new hash_set<tree *>;
info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
info->context = cgn->decl;
fold here, as the chain record type is not yet finalized. */
if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
&& !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- pointer_set_insert (info->mem_refs, tp);
+ info->mem_refs->add (tp);
wi->val_only = save_val_only;
break;
}
/* Fold the MEM_REF *E. */
-static bool
-fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
+bool
+fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
{
tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
*ref_p = fold (*ref_p);
}
/* Fold the rewritten MEM_REF trees. */
- pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
+ root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
/* Dump the translated tree function. */
if (dump_file)
next = iter_nestinfo_next (node);
pointer_map_destroy (node->var_map);
pointer_map_destroy (node->field_map);
- pointer_set_destroy (node->mem_refs);
+ delete node->mem_refs;
free (node);
node = next;
}
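pointer_set_traverse took an untyped callback through void * arguments; hash_set::traverse is a template member whose arguments are the extra-data type and the callback itself, which is presumably why fold_mem_refs above loses its static qualifier (a function used as a template argument needs external linkage under the C++98 rules in effect here). A minimal sketch of the same shape, assuming hash-set.h; elt and visit_elt are placeholders:

/* Sketch of the traverse () idiom used for root->mem_refs above.
   Assumes the usual GCC include prologue plus hash-set.h; ELT and the
   callback body are placeholders.  */
#include "hash-set.h"

typedef int *elt;

/* Not static: a function used as a template argument needs external
   linkage under C++98.  */
bool
visit_elt (elt const &e, void *data ATTRIBUTE_UNUSED)
{
  /* ... operate on E ...  */
  return true;
}

static void
visit_all (hash_set<elt> *set)
{
  /* First template argument: the type of the extra argument passed
     through; second: the callback itself.  */
  set->traverse<void *, visit_elt> (NULL);
}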
#include "expr.h"
#include "tree-pretty-print.h"
#include "hashtab.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "gimple-expr.h"
#include "cgraph.h"
#include "langhooks.h"
DEBUG_FUNCTION void
debug_tree_chain (tree t)
{
- struct pointer_set_t *seen = pointer_set_create ();
+ hash_set<tree> seen;
while (t)
{
print_generic_expr (stderr, t, TDF_VOPS|TDF_MEMSYMS|TDF_UID);
fprintf (stderr, " ");
t = TREE_CHAIN (t);
- if (pointer_set_insert (seen, t))
+ if (seen.add (t))
{
fprintf (stderr, "... [cycled back to ");
print_generic_expr (stderr, t, TDF_VOPS|TDF_MEMSYMS|TDF_UID);
}
}
fprintf (stderr, "\n");
-
- pointer_set_destroy (seen);
}
/* Prints declaration DECL to the FILE with details specified by FLAGS. */
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "intl.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
static void
maybe_lower_iteration_bound (struct loop *loop)
{
- pointer_set_t *not_executed_last_iteration = NULL;
+ hash_set<gimple> *not_executed_last_iteration = NULL;
struct nb_iter_bound *elt;
bool found_exit = false;
vec<basic_block> queue = vNULL;
&& wi::ltu_p (elt->bound, loop->nb_iterations_upper_bound))
{
if (!not_executed_last_iteration)
- not_executed_last_iteration = pointer_set_create ();
- pointer_set_insert (not_executed_last_iteration, elt->stmt);
+ not_executed_last_iteration = new hash_set<gimple>;
+ not_executed_last_iteration->add (elt->stmt);
}
}
if (!not_executed_last_iteration)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- if (pointer_set_contains (not_executed_last_iteration, stmt))
+ if (not_executed_last_iteration->contains (stmt))
{
stmt_found = true;
break;
}
BITMAP_FREE (visited);
queue.release ();
- pointer_set_destroy (not_executed_last_iteration);
+ delete not_executed_last_iteration;
}
/* Records estimates on numbers of iterations of LOOP. If USE_UNDEFINED_P
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
static bool neg_replacement (basic_block, basic_block,
edge, edge, gimple, tree, tree);
static bool cond_store_replacement (basic_block, basic_block, edge, edge,
- struct pointer_set_t *);
+ hash_set<tree> *);
static bool cond_if_else_store_replacement (basic_block, basic_block, basic_block);
-static struct pointer_set_t * get_non_trapping (void);
+static hash_set<tree> * get_non_trapping ();
static void replace_phi_edge_with_variable (basic_block, edge, gimple, tree);
static void hoist_adjacent_loads (basic_block, basic_block,
basic_block, basic_block);
basic_block *bb_order;
unsigned n, i;
bool cfgchanged = false;
- struct pointer_set_t *nontrap = 0;
+ hash_set<tree> *nontrap = 0;
if (do_store_elim)
/* Calculate the set of non-trapping memory accesses. */
free (bb_order);
if (do_store_elim)
- pointer_set_destroy (nontrap);
+ delete nontrap;
/* If the CFG has changed, we should cleanup the CFG. */
if (cfgchanged && do_store_elim)
{
class nontrapping_dom_walker : public dom_walker
{
public:
- nontrapping_dom_walker (cdi_direction direction, pointer_set_t *ps)
+ nontrapping_dom_walker (cdi_direction direction, hash_set<tree> *ps)
: dom_walker (direction), m_nontrapping (ps), m_seen_ssa_names (128) {}
virtual void before_dom_children (basic_block);
the RHS. */
void add_or_mark_expr (basic_block, tree, bool);
- pointer_set_t *m_nontrapping;
+ hash_set<tree> *m_nontrapping;
/* The hash table for remembering what we've seen. */
hash_table<ssa_names_hasher> m_seen_ssa_names;
then we can't trap. */
if (found_bb && (((size_t)found_bb->aux) & 1) == 1)
{
- pointer_set_insert (m_nontrapping, exp);
+ m_nontrapping->add (exp);
}
else
{
It will do a dominator walk over the whole function, and it will
make use of the bb->aux pointers. It returns a set of trees
(the MEM_REFs themselves) which can't trap. */
-static struct pointer_set_t *
+static hash_set<tree> *
get_non_trapping (void)
{
nt_call_phase = 0;
- pointer_set_t *nontrap = pointer_set_create ();
+ hash_set<tree> *nontrap = new hash_set<tree>;
/* We're going to do a dominator walk, so ensure that we have
dominance information. */
calculate_dominance_info (CDI_DOMINATORS);
static bool
cond_store_replacement (basic_block middle_bb, basic_block join_bb,
- edge e0, edge e1, struct pointer_set_t *nontrap)
+ edge e0, edge e1, hash_set<tree> *nontrap)
{
gimple assign = last_and_only_stmt (middle_bb);
tree lhs, rhs, name, name2;
/* Prove that we can move the store down. We could also check
TREE_THIS_NOTRAP here, but in that case we also could move stores,
whose value is not available readily, which we want to avoid. */
- if (!pointer_set_contains (nontrap, lhs))
+ if (!nontrap->contains (lhs))
return false;
/* Now we've checked the constraints, so do the transformation:
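In tree-ssa-phiopt.c the ownership handoff is explicit: get_non_trapping () returns a freshly allocated hash_set<tree>, and tree_ssa_phiopt_worker deletes it once the stores have been rewritten. A minimal sketch of that producer-allocates, consumer-deletes shape, assuming hash-set.h; collect () stands in for the dominator walk:

/* Sketch: producer allocates the set, consumer deletes it, mirroring
   get_non_trapping () above.  Assumes the usual GCC include prologue
   plus hash-set.h; COLLECT is a stand-in for the dominator walk.  */
#include "hash-set.h"

typedef void *elem;

static int sentinel;

static void
collect (hash_set<elem> *out)
{
  /* Placeholder for the walk that records non-trapping references.  */
  out->add (&sentinel);
}

static hash_set<elem> *
compute_safe_set (void)
{
  hash_set<elem> *safe = new hash_set<elem>;
  collect (safe);
  return safe;		/* Ownership passes to the caller.  */
}

static void
use_safe_set (void)
{
  hash_set<elem> *safe = compute_safe_set ();
  /* ... query safe->contains (x) while transforming ...  */
  delete safe;		/* Replaces pointer_set_destroy ().  */
}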
#include "function.h"
#include "timevar.h"
#include "dumpfile.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
}
auto_vec<tree, alloc_count> fewvars;
- pointer_set_t *vars = NULL;
+ hash_set<tree> *vars = NULL;
/* If we're already starting with 3/4 of alloc_count, go for a
- pointer_set, otherwise start with an unordered stack-allocated
+ hash_set, otherwise start with an unordered stack-allocated
VEC. */
if (i * 4 > alloc_count * 3)
- vars = pointer_set_create ();
+ vars = new hash_set<tree>;
/* Now go through the initial debug stmts in DEST again, this time
actually inserting in VARS or FEWVARS. Don't bother checking for
gcc_unreachable ();
if (vars)
- pointer_set_insert (vars, var);
+ vars->add (var);
else
fewvars.quick_push (var);
}
or somesuch. Adding `&& bb == src' to the condition
below will preserve all potentially relevant debug
notes. */
- if (vars && pointer_set_insert (vars, var))
+ if (vars && vars->add (var))
continue;
else if (!vars)
{
fewvars.quick_push (var);
else
{
- vars = pointer_set_create ();
+ vars = new hash_set<tree>;
for (i = 0; i < alloc_count; i++)
- pointer_set_insert (vars, fewvars[i]);
+ vars->add (fewvars[i]);
fewvars.release ();
- pointer_set_insert (vars, var);
+ vars->add (var);
}
}
while (bb != src && single_pred_p (bb));
if (vars)
- pointer_set_destroy (vars);
+ delete vars;
else if (fewvars.exists ())
fewvars.release ();
}
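The threadedge hunks keep the existing two-tier strategy: a handful of variables stays in a stack auto_vec, and only when the count grows does the code allocate a heap hash_set and move the vec's contents into it. A minimal sketch of that promotion, assuming vec.h and hash-set.h; the threshold and item type are illustrative, not the values used above:

/* Sketch of the auto_vec -> hash_set promotion used above.  Assumes
   the usual GCC include prologue plus vec.h and hash-set.h; THRESHOLD
   and ITEM are illustrative.  */
#include "hash-set.h"
#include "vec.h"

typedef void *item;
#define THRESHOLD 16

struct seen_items
{
  seen_items () : many (NULL) {}
  ~seen_items () { delete many; }
  auto_vec<item, THRESHOLD> few;	/* Small case: linear scan.  */
  hash_set<item> *many;			/* Allocated only on overflow.  */
};

/* Record X; return true if it had already been recorded.  */
static bool
record (seen_items *s, item x)
{
  if (s->many)
    return s->many->add (x);
  for (unsigned i = 0; i < s->few.length (); i++)
    if (s->few[i] == x)
      return true;
  if (s->few.length () < THRESHOLD)
    {
      s->few.quick_push (x);
      return false;
    }
  /* Promote: copy everything into a hash_set and keep using it.  */
  s->many = new hash_set<item>;
  for (unsigned i = 0; i < s->few.length (); i++)
    s->many->add (s->few[i]);
  s->few.release ();
  s->many->add (x);
  return false;
}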
#include "function.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
+#include "hash-set.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
/* Pointer set of potentially undefined ssa names, i.e.,
ssa names that are defined by phi with operands that
are not defined or potentially undefined. */
-static pointer_set_t *possibly_undefined_names = 0;
+static hash_set<tree> *possibly_undefined_names = 0;
/* Bit mask handling macros. */
#define MASK_SET_BIT(mask, pos) mask |= (1 << pos)
{
return (ssa_undefined_value_p (t)
|| (possibly_undefined_names
- && pointer_set_contains (possibly_undefined_names, t)));
+ && possibly_undefined_names->contains (t)));
}
static void
collect_phi_def_edges (gimple phi, basic_block cd_root,
vec<edge> *edges,
- pointer_set_t *visited_phis)
+ hash_set<gimple> *visited_phis)
{
size_t i, n;
edge opnd_edge;
tree opnd;
- if (pointer_set_insert (visited_phis, phi))
+ if (visited_phis->add (phi))
return;
n = gimple_phi_num_args (phi);
vec<edge> def_edges = vNULL;
bool has_valid_pred = false;
basic_block phi_bb, cd_root = 0;
- pointer_set_t *visited_phis;
phi_bb = gimple_bb (phi);
/* First find the closest dominating bb to be
if (!cd_root)
return false;
- visited_phis = pointer_set_create ();
- collect_phi_def_edges (phi, cd_root, &def_edges, visited_phis);
- pointer_set_destroy (visited_phis);
+ hash_set<gimple> visited_phis;
+ collect_phi_def_edges (phi, cd_root, &def_edges, &visited_phis);
n = def_edges.length ();
if (n == 0)
basic_block use_bb,
gimple phi,
unsigned uninit_opnds,
- pointer_set_t *visited_phis);
+ hash_set<gimple> *visited_phis);
/* Returns true if all uninitialized opnds are pruned. Returns false
otherwise. PHI is the phi node with uninitialized operands,
gimple flag_def,
tree boundary_cst,
enum tree_code cmp_code,
- pointer_set_t *visited_phis,
+ hash_set<gimple> *visited_phis,
bitmap *visited_flag_phis)
{
unsigned i;
static bool
use_pred_not_overlap_with_undef_path_pred (pred_chain_union preds,
gimple phi, unsigned uninit_opnds,
- pointer_set_t *visited_phis)
+ hash_set<gimple> *visited_phis)
{
unsigned int i, n;
gimple flag_def = 0;
inline static void
push_to_worklist (tree op, vec<pred_info, va_heap, vl_ptr> *work_list,
- pointer_set_t *mark_set)
+ hash_set<tree> *mark_set)
{
- if (pointer_set_contains (mark_set, op))
+ if (mark_set->contains (op))
return;
- pointer_set_insert (mark_set, op);
+ mark_set->add (op);
pred_info arg_pred;
arg_pred.pred_lhs = op;
pred_info pred,
enum tree_code and_or_code,
vec<pred_info, va_heap, vl_ptr> *work_list,
- pointer_set_t *mark_set)
+ hash_set<tree> *mark_set)
{
if (!is_neq_zero_form_p (pred))
{
pred_info pred)
{
vec<pred_info, va_heap, vl_ptr> work_list = vNULL;
- pointer_set_t *mark_set = NULL;
enum tree_code and_or_code = ERROR_MARK;
pred_chain norm_chain = vNULL;
}
work_list.safe_push (pred);
- mark_set = pointer_set_create ();
+ hash_set<tree> mark_set;
while (!work_list.is_empty ())
{
pred_info a_pred = work_list.pop ();
normalize_one_pred_1 (norm_preds, &norm_chain, a_pred,
- and_or_code, &work_list, mark_set);
+ and_or_code, &work_list, &mark_set);
}
if (and_or_code == BIT_AND_EXPR)
norm_preds->safe_push (norm_chain);
work_list.release ();
- pointer_set_destroy (mark_set);
}
static void
pred_chain one_chain)
{
vec<pred_info, va_heap, vl_ptr> work_list = vNULL;
- pointer_set_t *mark_set = pointer_set_create ();
+ hash_set<tree> mark_set;
pred_chain norm_chain = vNULL;
size_t i;
for (i = 0; i < one_chain.length (); i++)
{
work_list.safe_push (one_chain[i]);
- pointer_set_insert (mark_set, one_chain[i].pred_lhs);
+ mark_set.add (one_chain[i].pred_lhs);
}
while (!work_list.is_empty ())
{
pred_info a_pred = work_list.pop ();
normalize_one_pred_1 (0, &norm_chain, a_pred,
- BIT_AND_EXPR, &work_list, mark_set);
+ BIT_AND_EXPR, &work_list, &mark_set);
}
norm_preds->safe_push (norm_chain);
work_list.release ();
- pointer_set_destroy (mark_set);
}
/* Normalize predicate chains PREDS and returns the normalized one. */
basic_block use_bb,
gimple phi,
unsigned uninit_opnds,
- pointer_set_t *visited_phis)
+ hash_set<gimple> *visited_phis)
{
basic_block phi_bb;
pred_chain_union preds = vNULL;
bool has_valid_preds = false;
bool is_properly_guarded = false;
- if (pointer_set_insert (visited_phis, phi))
+ if (visited_phis->add (phi))
return false;
phi_bb = gimple_bb (phi);
static gimple
find_uninit_use (gimple phi, unsigned uninit_opnds,
vec<gimple> *worklist,
- pointer_set_t *added_to_worklist)
+ hash_set<gimple> *added_to_worklist)
{
tree phi_result;
use_operand_p use_p;
FOR_EACH_IMM_USE_FAST (use_p, iter, phi_result)
{
- pointer_set_t *visited_phis;
basic_block use_bb;
use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
- visited_phis = pointer_set_create ();
-
if (gimple_code (use_stmt) == GIMPLE_PHI)
use_bb = gimple_phi_arg_edge (use_stmt,
PHI_ARG_INDEX_FROM_USE (use_p))->src;
else
use_bb = gimple_bb (use_stmt);
+ hash_set<gimple> visited_phis;
if (is_use_properly_guarded (use_stmt, use_bb, phi, uninit_opnds,
- visited_phis))
- {
- pointer_set_destroy (visited_phis);
- continue;
- }
- pointer_set_destroy (visited_phis);
+ &visited_phis))
+ continue;
if (dump_file && (dump_flags & TDF_DETAILS))
{
/* Found a phi use that is not guarded,
add the phi to the worklist. */
- if (!pointer_set_insert (added_to_worklist, use_stmt))
+ if (!added_to_worklist->add (use_stmt))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
}
worklist->safe_push (use_stmt);
- pointer_set_insert (possibly_undefined_names, phi_result);
+ possibly_undefined_names->add (phi_result);
}
}
static void
warn_uninitialized_phi (gimple phi, vec<gimple> *worklist,
- pointer_set_t *added_to_worklist)
+ hash_set<gimple> *added_to_worklist)
{
unsigned uninit_opnds;
gimple uninit_use_stmt = 0;
basic_block bb;
gimple_stmt_iterator gsi;
vec<gimple> worklist = vNULL;
- pointer_set_t *added_to_worklist;
calculate_dominance_info (CDI_DOMINATORS);
calculate_dominance_info (CDI_POST_DOMINATORS);
timevar_push (TV_TREE_UNINIT);
- possibly_undefined_names = pointer_set_create ();
- added_to_worklist = pointer_set_create ();
+ possibly_undefined_names = new hash_set<tree>;
+ hash_set<gimple> added_to_worklist;
/* Initialize worklist */
FOR_EACH_BB_FN (bb, fun)
&& uninit_undefined_value_p (op))
{
worklist.safe_push (phi);
- pointer_set_insert (added_to_worklist, phi);
+ added_to_worklist.add (phi);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "[WORKLIST]: add to initial list: ");
{
gimple cur_phi = 0;
cur_phi = worklist.pop ();
- warn_uninitialized_phi (cur_phi, &worklist, added_to_worklist);
+ warn_uninitialized_phi (cur_phi, &worklist, &added_to_worklist);
}
worklist.release ();
- pointer_set_destroy (added_to_worklist);
- pointer_set_destroy (possibly_undefined_names);
+ delete possibly_undefined_names;
possibly_undefined_names = NULL;
free_dominance_info (CDI_POST_DOMINATORS);
timevar_pop (TV_TREE_UNINIT);
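possibly_undefined_names stays a file-scope set that exists only while the pass runs: allocated with new at the start, deleted and cleared at the end, so uninit_undefined_value_p can treat a null pointer as "no extra information". A minimal sketch, assuming hash-set.h; the names are placeholders:

/* Sketch of the per-pass, file-scope set used for
   possibly_undefined_names above.  Assumes the usual GCC include
   prologue plus hash-set.h; NAME_T is a placeholder.  */
#include "hash-set.h"

typedef void *name_t;

static hash_set<name_t> *maybe_undef;	/* Null outside the pass.  */

static bool
maybe_undef_p (name_t n)
{
  /* Testing the pointer first keeps the predicate safe to call when
     the pass is not running.  */
  return maybe_undef && maybe_undef->contains (n);
}

static void
run_pass (void)
{
  maybe_undef = new hash_set<name_t>;
  /* ... the analysis records names with maybe_undef->add (n) ...  */
  delete maybe_undef;
  maybe_undef = NULL;
}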
vec<tree> worklist;
/* Set of traversed objects. Used to avoid duplicate visits. */
- struct pointer_set_t *pset;
+ hash_set<tree> *pset;
/* Array of symbols to process with free_lang_data_in_decl. */
vec<tree> decls;
static inline void
fld_worklist_push (tree t, struct free_lang_data_d *fld)
{
- if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
+ if (t && !is_lang_specific (t) && !fld->pset->contains (t))
fld->worklist.safe_push ((t));
}
{
while (1)
{
- if (!pointer_set_contains (fld->pset, t))
+ if (!fld->pset->contains (t))
walk_tree (&t, find_decls_types_r, fld, fld->pset);
if (fld->worklist.is_empty ())
break;
alias_pair *p;
/* Initialize sets and arrays to store referenced decls and types. */
- fld.pset = pointer_set_create ();
+ fld.pset = new hash_set<tree>;
fld.worklist.create (0);
fld.decls.create (100);
fld.types.create (100);
FOR_EACH_VEC_ELT (fld.types, i, t)
free_lang_data_in_type (t);
- pointer_set_destroy (fld.pset);
+ delete fld.pset;
fld.worklist.release ();
fld.decls.release ();
fld.types.release ();
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
- struct pointer_set_t *pset, walk_tree_lh lh)
+ hash_set<tree> *pset, walk_tree_lh lh)
{
tree result = NULL_TREE;
tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
- struct pointer_set_t *pset, walk_tree_lh lh)
+ hash_set<tree> *pset, walk_tree_lh lh)
{
enum tree_code code;
int walk_subtrees;
/* Don't walk the same tree twice, if the user has requested
that we avoid doing so. */
- if (pset && pointer_set_insert (pset, *tp))
+ if (pset && pset->add (*tp))
return NULL_TREE;
/* Call the function. */
walk_tree_lh lh)
{
tree result;
- struct pointer_set_t *pset;
- pset = pointer_set_create ();
- result = walk_tree_1 (tp, func, data, pset, lh);
- pointer_set_destroy (pset);
+ hash_set<tree> pset;
+ result = walk_tree_1 (tp, func, data, &pset, lh);
return result;
}
#define GCC_TREE_H
#include "tree-core.h"
+#include "hash-set.h"
#include "wide-int.h"
#include "inchash.h"
extern bool using_eh_for_cleanups_p (void);
extern const char *get_tree_code_name (enum tree_code);
extern void set_call_expr_flags (tree, int);
-extern tree walk_tree_1 (tree*, walk_tree_fn, void*, struct pointer_set_t*,
+extern tree walk_tree_1 (tree*, walk_tree_fn, void*, hash_set<tree>*,
walk_tree_lh);
extern tree walk_tree_without_duplicates_1 (tree*, walk_tree_fn, void*,
walk_tree_lh);
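walk_tree_1 and the walk_tree_lh hook now take hash_set<tree> *, and walk_tree_without_duplicates_1 simply builds its set on the stack. Passing an explicit set also lets several walks share one visited table, which is how verify_gimple_in_cfg above reuses a single set across every PHI argument and statement. A minimal sketch, assuming tree.h's walk_tree macro and hash-set.h; note_tree is a placeholder callback:

/* Sketch: sharing one visited set across several walk_tree calls.
   Assumes the usual GCC include prologue plus tree.h and hash-set.h;
   NOTE_TREE is a placeholder.  */
#include "hash-set.h"

static tree
note_tree (tree *tp ATTRIBUTE_UNUSED, int *walk_subtrees ATTRIBUTE_UNUSED,
	   void *data ATTRIBUTE_UNUSED)
{
  /* ... inspect *TP; return non-NULL to stop the walk early ...  */
  return NULL_TREE;
}

static void
walk_both (tree a, tree b)
{
  hash_set<tree> visited;	/* Stack allocated; no destroy call.  */
  /* Nodes already reached from A are not revisited while walking B.  */
  walk_tree (&a, note_tree, NULL, &visited);
  walk_tree (&b, note_tree, NULL, &visited);
}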
#include "data-streamer.h"
#include "builtins.h"
#include "tree-nested.h"
+#include "hash-set.h"
/* In this file value profile based optimizations are placed. Currently the
following optimizations are implemented (for more detailed descriptions
static int
visit_hist (void **slot, void *data)
{
- struct pointer_set_t *visited = (struct pointer_set_t *) data;
+ hash_set<histogram_value> *visited = (hash_set<histogram_value> *) data;
histogram_value hist = *(histogram_value *) slot;
- if (!pointer_set_contains (visited, hist)
+ if (!visited->contains (hist)
&& hist->type != HIST_TYPE_TIME_PROFILE)
{
error ("dead histogram");
basic_block bb;
gimple_stmt_iterator gsi;
histogram_value hist;
- struct pointer_set_t *visited_hists;
error_found = false;
- visited_hists = pointer_set_create ();
+ hash_set<histogram_value> visited_hists;
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
dump_histogram_value (stderr, hist);
error_found = true;
}
- pointer_set_insert (visited_hists, hist);
+ visited_hists.add (hist);
}
}
if (VALUE_HISTOGRAMS (cfun))
- htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, visited_hists);
- pointer_set_destroy (visited_hists);
+ htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, &visited_hists);
if (error_found)
internal_error ("verify_histograms failed");
}
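visit_hist still gets its set through htab_traverse's void * data argument; the hunk only changes what that pointer is cast back to. A minimal sketch of threading a hash_set through such a callback, assuming libiberty's hashtab.h and hash-set.h; the table payload is left abstract:

/* Sketch: passing a hash_set through htab_traverse's void * data
   pointer, as visit_hist above does.  Assumes the usual GCC include
   prologue plus hashtab.h and hash-set.h.  */
#include "hashtab.h"
#include "hash-set.h"

static int
check_slot (void **slot, void *data)
{
  hash_set<void *> *seen = (hash_set<void *> *) data;
  if (!seen->contains (*slot))
    {
      /* ... report the stale entry ...  */
    }
  return 1;	/* Nonzero keeps htab_traverse going.  */
}

static void
check_table (htab_t table, hash_set<void *> *seen)
{
  htab_traverse (table, check_slot, seen);
}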
#include "common/common-target.h"
#include "targhooks.h"
#include "cgraph.h"
-#include "pointer-set.h"
+#include "hash-set.h"
#include "asan.h"
#include "basic-block.h"
/* Avoid O(external_decls**2) lookups in the pending_assemble_externals
TREE_LIST in assemble_external. */
-static struct pointer_set_t *pending_assemble_externals_set;
+static hash_set<tree> *pending_assemble_externals_set;
/* True if DECL is a function decl for which no out-of-line copy exists.
It is assumed that DECL's assembler name has been set. */
pending_assemble_externals = 0;
pending_assemble_externals_processed = true;
- pointer_set_destroy (pending_assemble_externals_set);
+ delete pending_assemble_externals_set;
#endif
}
return;
}
- if (! pointer_set_insert (pending_assemble_externals_set, decl))
+ if (! pending_assemble_externals_set->add (decl))
pending_assemble_externals = tree_cons (NULL, decl,
pending_assemble_externals);
#endif
readonly_data_section = text_section;
#ifdef ASM_OUTPUT_EXTERNAL
- pending_assemble_externals_set = pointer_set_create ();
+ pending_assemble_externals_set = new hash_set<tree>;
#endif
}
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "lto-streamer.h"
+#include "hash-set.h"
const char * const tls_model_names[]={"none", "tls-emulated", "tls-real",
"tls-global-dynamic", "tls-local-dynamic",
varpool_node *first = (varpool_node *)(void *)1;
int i;
struct ipa_ref *ref = NULL;
- struct pointer_set_t *referenced = pointer_set_create ();
+ hash_set<varpool_node *> referenced;
if (seen_error ())
return;
&& vnode->analyzed)
enqueue_node (vnode, &first);
else
- pointer_set_insert (referenced, node);
+ referenced.add (node);
}
}
if (cgraph_dump_file)
{
if (cgraph_dump_file)
fprintf (cgraph_dump_file, " %s", node->asm_name ());
- if (pointer_set_contains (referenced, node))
+ if (referenced.contains (node))
node->remove_initializer ();
else
node->remove ();
}
}
- pointer_set_destroy (referenced);
+
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "\n");
}