+2014-08-07 Trevor Saunders <tsaunders@mozilla.com>
+
+ * hash-map.h (default_hashmap_traits): Adjust the overloads of the
+ hash function so they do not conflict.
+ * alias.c, cfgexpand.c, dse.c, except.h, gimple-expr.c,
+ gimple-ssa-strength-reduction.c, gimple-ssa.h, ifcvt.c,
+ lto-streamer-out.c, lto-streamer.h, tree-affine.c, tree-affine.h,
+ tree-predcom.c, tree-scalar-evolution.c, tree-ssa-loop-im.c,
+ tree-ssa-loop-niter.c, tree-ssa.c, value-prof.c: Use hash_map instead
+ of pointer_map.
+
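The conversion is mechanical at each call site.  A minimal sketch of the
before/after idioms, assuming the hash-map.h API of this period (the names
here are illustrative, not taken from the patch):

    /* Before: untyped pointer_map, with a cast at every access.  */
    pointer_map_t *map = pointer_map_create ();
    *pointer_map_insert (map, key) = (void *) val;
    void **slot = pointer_map_contains (map, key);  /* NULL if absent */
    if (slot)
      use ((tree) *slot);
    pointer_map_destroy (map);

    /* After: typed hash_map; no casts needed.  */
    hash_map<tree, tree> *map = new hash_map<tree, tree>;
    map->put (key, val);
    tree *slot = map->get (key);                    /* NULL if absent */
    if (slot)
      use (*slot);
    tree &ref = map->get_or_insert (key);           /* inserts if absent */
    delete map;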
2014-08-07 Marek Polacek <polacek@redhat.com>
* fold-const.c (fold_binary_loc): Add folding of
&& ! is_global_var (base)
&& cfun->gimple_df->decls_to_pointers != NULL)
{
- void *namep;
- namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
+ tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
if (namep)
- ref->base = build_simple_mem_ref (*(tree *)namep);
+ ref->base = build_simple_mem_ref (*namep);
}
ref->ref_alias_set = MEM_ALIAS_SET (mem);
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
-static struct pointer_map_t *decl_to_stack_part;
+static hash_map<tree, size_t> *decl_to_stack_part;
/* Conflict bitmaps go on this obstack. This allows us to destroy
all of them in one big sweep. */
= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
}
if (!decl_to_stack_part)
- decl_to_stack_part = pointer_map_create ();
+ decl_to_stack_part = new hash_map<tree, size_t>;
v = &stack_vars[stack_vars_num];
- * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
+ decl_to_stack_part->put (decl, stack_vars_num);
v->decl = decl;
v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
&& DECL_P (op)
&& DECL_RTL_IF_SET (op) == pc_rtx)
{
- size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
+ size_t *v = decl_to_stack_part->get (op);
if (v)
bitmap_set_bit (active, *v);
}
&& DECL_P (op)
&& DECL_RTL_IF_SET (op) == pc_rtx)
{
- size_t *v =
- (size_t *) pointer_map_contains (decl_to_stack_part, op);
+ size_t *v = decl_to_stack_part->get (op);
if (v && bitmap_set_bit (active, *v))
{
size_t num = *v;
if (TREE_CODE (lhs) != VAR_DECL)
continue;
if (DECL_RTL_IF_SET (lhs) == pc_rtx
- && (v = (size_t *)
- pointer_map_contains (decl_to_stack_part, lhs)))
+ && (v = decl_to_stack_part->get (lhs)))
bitmap_clear_bit (work, *v);
}
else if (!is_gimple_debug (stmt))
return 0;
}
+struct part_traits : default_hashmap_traits
+{
+ template<typename T>
+ static bool
+ is_deleted (T &e)
+ { return e.m_value == reinterpret_cast<void *> (1); }
+
+ template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
+ template<typename T>
+ static void
+ mark_deleted (T &e)
+ { e.m_value = reinterpret_cast<bitmap> (1); }
+
+ template<typename T>
+ static void
+ mark_empty (T &e)
+ { e.m_value = NULL; }
+};
+
+typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
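A note on the traits: default_hashmap_traits keeps its empty/deleted
sentinels in the key field (null and 1, which no valid pointer key uses),
as the overrides above suggest.  That does not work here, where the keys
are DECL_PT_UID values and 0 or 1 are legitimate, so part_traits moves the
sentinels into the bitmap value instead (NULL for empty, address 1 for
deleted), relying on a stored partition bitmap never being null.
profile_id_traits in value-prof.c below plays the same game in the key
field, reserving 0 and UINT_MAX; that is safe because init_node_map keeps
profile ids nonzero.  A small usage sketch under those assumptions, with
illustrative names:

    part_hashmap *decls_to_partitions = new part_hashmap;
    decls_to_partitions->put (uid, part);   /* PART must be non-NULL */
    if (bitmap *p = decls_to_partitions->get (uid))
      bitmap_ior_into (temp, *p);
    delete decls_to_partitions;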
/* If the points-to solution *PT points to variables that are in a partition
together with other variables, add all partition members to the pointed-to
static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
- struct pointer_map_t *decls_to_partitions,
+ part_hashmap *decls_to_partitions,
hash_set<bitmap> *visited, bitmap temp)
{
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
if ((!temp
|| !bitmap_bit_p (temp, i))
- && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
- (void *)(size_t) i)))
+ && (part = decls_to_partitions->get (i)))
bitmap_ior_into (temp, *part);
if (!bitmap_empty_p (temp))
bitmap_ior_into (pt->vars, temp);
static void
update_alias_info_with_stack_vars (void)
{
- struct pointer_map_t *decls_to_partitions = NULL;
+ part_hashmap *decls_to_partitions = NULL;
size_t i, j;
tree var = NULL_TREE;
if (!decls_to_partitions)
{
- decls_to_partitions = pointer_map_create ();
- cfun->gimple_df->decls_to_pointers = pointer_map_create ();
+ decls_to_partitions = new part_hashmap;
+ cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
}
/* Create an SSA_NAME that points to the partition for use
tree decl = stack_vars[j].decl;
unsigned int uid = DECL_PT_UID (decl);
bitmap_set_bit (part, uid);
- *((bitmap *) pointer_map_insert (decls_to_partitions,
- (void *)(size_t) uid)) = part;
- *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
- decl)) = name;
+ decls_to_partitions->put (uid, part);
+ cfun->gimple_df->decls_to_pointers->put (decl, name);
if (TREE_ADDRESSABLE (decl))
TREE_ADDRESSABLE (name) = 1;
}
add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
decls_to_partitions, &visited, temp);
- pointer_map_destroy (decls_to_partitions);
+ delete decls_to_partitions;
BITMAP_FREE (temp);
}
}
bitmap_obstack_initialize (&stack_var_bitmap_obstack);
/* A map from decl to stack partition. */
- decl_to_stack_part = pointer_map_create ();
+ decl_to_stack_part = new hash_map<tree, size_t>;
/* Initialize local stack smashing state. */
has_protected_decls = false;
stack_vars = NULL;
stack_vars_sorted = NULL;
stack_vars_alloc = stack_vars_num = 0;
- pointer_map_destroy (decl_to_stack_part);
+ delete decl_to_stack_part;
decl_to_stack_part = NULL;
}
tree var, outer_block = DECL_INITIAL (current_function_decl);
vec<tree> maybe_local_decls = vNULL;
rtx var_end_seq = NULL_RTX;
- struct pointer_map_t *ssa_name_decls;
unsigned i;
unsigned len;
bool gen_stack_protect_signal = false;
init_vars_expansion ();
- ssa_name_decls = pointer_map_create ();
+ hash_map<tree, tree> ssa_name_decls;
for (i = 0; i < SA.map->num_partitions; i++)
{
tree var = partition_to_var (SA.map, i);
we could have coalesced (those with the same type). */
if (SSA_NAME_VAR (var) == NULL_TREE)
{
- void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
+ tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
if (!*slot)
- *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
- replace_ssa_name_symbol (var, (tree) *slot);
+ *slot = create_tmp_reg (TREE_TYPE (var), NULL);
+ replace_ssa_name_symbol (var, *slot);
}
/* Always allocate space for partitions based on VAR_DECLs. But for
}
}
}
- pointer_map_destroy (ssa_name_decls);
if (flag_stack_protect == SPCT_FLAG_STRONG)
gen_stack_protect_signal
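Note that ssa_name_decls also moves from the heap to automatic storage,
which is why the pointer_map_destroy call above simply disappears:
hash_map's destructor releases the table.  The same pattern recurs with
then_vals/else_vals in ifcvt.c and DFS::sccstate in lto-streamer-out.c
below.  A minimal sketch, with a hypothetical function:

    void
    sketch (tree key, tree val)
    {
      hash_map<tree, tree> m;   /* no create call needed */
      m.put (key, val);
    }                           /* ~hash_map releases the table here */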
/* Maps the blocks that do not contain tree labels to rtx labels. */
-static struct pointer_map_t *lab_rtx_for_bb;
+static hash_map<basic_block, rtx> *lab_rtx_for_bb;
/* Returns the label_rtx expression for a label starting basic block BB. */
gimple_stmt_iterator gsi;
tree lab;
gimple lab_stmt;
- void **elt;
if (bb->flags & BB_RTL)
return block_label (bb);
- elt = pointer_map_contains (lab_rtx_for_bb, bb);
+ rtx *elt = lab_rtx_for_bb->get (bb);
if (elt)
- return (rtx) *elt;
+ return *elt;
/* Find the tree label if it is present. */
return label_rtx (lab);
}
- elt = pointer_map_insert (lab_rtx_for_bb, bb);
- *elt = gen_label_rtx ();
- return (rtx) *elt;
+ rtx l = gen_label_rtx ();
+ lab_rtx_for_bb->put (bb, l);
+ return l;
}
rtx note, last;
edge e;
edge_iterator ei;
- void **elt;
if (dump_file)
fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
stmt = NULL;
}
- elt = pointer_map_contains (lab_rtx_for_bb, bb);
+ rtx *elt = lab_rtx_for_bb->get (bb);
if (stmt || elt)
{
}
if (elt)
- emit_label ((rtx) *elt);
+ emit_label (*elt);
/* Java emits line number notes at the top of labels.
??? Make this go away once line number notes are obsoleted. */
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
e->flags &= ~EDGE_EXECUTABLE;
- lab_rtx_for_bb = pointer_map_create ();
+ lab_rtx_for_bb = new hash_map<basic_block, rtx>;
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
next_bb)
bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
/* Expansion is used by optimization passes too, set maybe_hot_insn_p
conservatively to true until they are all profile aware. */
- pointer_map_destroy (lab_rtx_for_bb);
+ delete lab_rtx_for_bb;
free_histograms ();
construct_exit_block ();
+2014-08-07 Trevor Saunders <tsaunders@mozilla.com>
+
+ * cp-tree.h, pt.c: Use hash_map instead of pointer_map.
+
2014-08-06 Jason Merrill <jason@redhat.com>
* init.c (build_vec_init): Fix constant initialization of
#include "function.h"
#include "hashtab.h"
#include "vec.h"
+#include "hash-map.h"
/* In order for the format checking to accept the C++ front end
diagnostic framework extensions, you must include this file before
cp_binding_level *class_bindings;
cp_binding_level *bindings;
- struct pointer_map_t *x_local_specializations;
+ hash_map<tree, tree> *GTY((skip)) x_local_specializations;
struct saved_scope *prev;
};
static tree
retrieve_local_specialization (tree tmpl)
{
- void **slot;
-
if (local_specializations == NULL)
return NULL_TREE;
- slot = pointer_map_contains (local_specializations, tmpl);
- return slot ? (tree) *slot : NULL_TREE;
+ tree *slot = local_specializations->get (tmpl);
+ return slot ? *slot : NULL_TREE;
}
/* Returns nonzero iff DECL is a specialization of TMPL. */
static void
register_local_specialization (tree spec, tree tmpl)
{
- void **slot;
-
- slot = pointer_map_insert (local_specializations, tmpl);
- *slot = spec;
+ local_specializations->put (tmpl, spec);
}
/* TYPE is a class type. Returns true if TYPE is an explicitly
bool unsubstituted_packs = false;
int i, len = -1;
tree result;
- struct pointer_map_t *saved_local_specializations = NULL;
+ hash_map<tree, tree> *saved_local_specializations = NULL;
bool need_local_specializations = false;
int levels;
case of recursive unification) might have bindings that we don't
want to use or alter. */
saved_local_specializations = local_specializations;
- local_specializations = pointer_map_create ();
+ local_specializations = new hash_map<tree, tree>;
}
/* For each argument in each argument pack, substitute into the
if (need_local_specializations)
{
- pointer_map_destroy (local_specializations);
+ delete local_specializations;
local_specializations = saved_local_specializations;
}
synthesize_method (d);
else if (TREE_CODE (d) == FUNCTION_DECL)
{
- struct pointer_map_t *saved_local_specializations;
+ hash_map<tree, tree> *saved_local_specializations;
tree subst_decl;
tree tmpl_parm;
tree spec_parm;
saved_local_specializations = local_specializations;
/* Set up the list of local specializations. */
- local_specializations = pointer_map_create ();
+ local_specializations = new hash_map<tree, tree>;
/* Set up context. */
if (DECL_OMP_DECLARE_REDUCTION_P (code_pattern)
}
/* We don't need the local specializations any more. */
- pointer_map_destroy (local_specializations);
+ delete local_specializations;
local_specializations = saved_local_specializations;
/* Finish the function. */
of the escape analysis. */
if (cfun->gimple_df->decls_to_pointers != NULL)
{
- void *namep
- = pointer_map_contains (cfun->gimple_df->decls_to_pointers, decl);
+ tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
if (namep)
- return TREE_ADDRESSABLE (*(tree *)namep);
+ return TREE_ADDRESSABLE (*namep);
}
return false;
struct function;
struct eh_region_d;
-struct pointer_map_t;
/* The type of an exception region. */
enum eh_region_type
&& cfun->gimple_df != NULL
&& cfun->gimple_df->decls_to_pointers != NULL)
{
- void *namep
- = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
+ tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
if (namep)
- TREE_ADDRESSABLE (*(tree *)namep) = 1;
+ TREE_ADDRESSABLE (*namep) = 1;
}
}
static hash_table<cand_chain_hasher> *base_cand_map;
\f
/* Pointer map used by tree_to_aff_combination_expand. */
-static struct pointer_map_t *name_expansions;
+static hash_map<tree, name_expansion *> *name_expansions;
/* Pointer map embodying a mapping from bases to alternative bases. */
static hash_map<tree, tree> *alt_base_map;
#ifndef GCC_GIMPLE_SSA_H
#define GCC_GIMPLE_SSA_H
+#include "hash-map.h"
#include "tree-ssa-operands.h"
/* This structure is used to map a gimple statement to a label,
/* A map of decls to artificial ssa-names that point to the partition
of the decl. */
- struct pointer_map_t * GTY((skip(""))) decls_to_pointers;
+ hash_map<tree, tree> * GTY((skip(""))) decls_to_pointers;
/* Free list of SSA_NAMEs. */
vec<tree, va_gc> *free_ssanames;
return uintptr_t(p) >> 3;
}
- /* The right thing to do here would be using is_integral to only allow
- template arguments of integer type, but reimplementing that is a pain, so
- we'll just promote everything to [u]int64_t and truncate to hashval_t. */
+ /* If the value converts to hashval_t just use it. */
- static hashval_t hash (uint64_t v) { return v; }
- static hashval_t hash (int64_t v) { return v; }
+ template<typename T> static hashval_t hash (T v) { return v; }
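The overload change matters for the new integer-keyed maps: a 32-bit key
such as the unsigned int profile ids in value-prof.c converts equally well
to uint64_t and to int64_t, so with the old pair of overloads the call was
ambiguous.  The single template accepts any key type that converts to
hashval_t, truncating wider values.  An illustrative sketch, not from the
patch:

    /* Ambiguous with the fixed-width pair (the conversions to uint64_t
       and to int64_t rank equally); with the template, T is deduced as
       unsigned int and the value is used directly.  */
    hashval_t h = default_hashmap_traits::hash (7u);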
/* Return true if the two keys passed as arguments are equal. */
static int
check_cond_move_block (basic_block bb,
- struct pointer_map_t *vals,
+ hash_map<rtx, rtx> *vals,
vec<rtx> *regs,
rtx cond)
{
FOR_BB_INSNS (bb, insn)
{
rtx set, dest, src;
- void **slot;
if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
continue;
/* Don't try to handle this if the source register was
modified earlier in the block. */
if ((REG_P (src)
- && pointer_map_contains (vals, src))
+ && vals->get (src))
|| (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
- && pointer_map_contains (vals, SUBREG_REG (src))))
+ && vals->get (SUBREG_REG (src))))
return FALSE;
/* Don't try to handle this if the destination register was
modified earlier in the block. */
- if (pointer_map_contains (vals, dest))
+ if (vals->get (dest))
return FALSE;
/* Don't try to handle this if the condition uses the
&& modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
return FALSE;
- slot = pointer_map_insert (vals, (void *) dest);
- *slot = (void *) src;
+ vals->put (dest, src);
regs->safe_push (dest);
}
static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
basic_block bb, rtx cond,
- struct pointer_map_t *then_vals,
- struct pointer_map_t *else_vals,
+ hash_map<rtx, rtx> *then_vals,
+ hash_map<rtx, rtx> *else_vals,
bool else_block_p)
{
enum rtx_code code;
FOR_BB_INSNS (bb, insn)
{
rtx set, target, dest, t, e;
- void **then_slot, **else_slot;
/* ??? Maybe emit conditional debug insn? */
if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
dest = SET_DEST (set);
- then_slot = pointer_map_contains (then_vals, dest);
- else_slot = pointer_map_contains (else_vals, dest);
- t = then_slot ? (rtx) *then_slot : NULL_RTX;
- e = else_slot ? (rtx) *else_slot : NULL_RTX;
+ rtx *then_slot = then_vals->get (dest);
+ rtx *else_slot = else_vals->get (dest);
+ t = then_slot ? *then_slot : NULL_RTX;
+ e = else_slot ? *else_slot : NULL_RTX;
if (else_block_p)
{
rtx seq, loc_insn;
rtx reg;
int c;
- struct pointer_map_t *then_vals;
- struct pointer_map_t *else_vals;
vec<rtx> then_regs = vNULL;
vec<rtx> else_regs = vNULL;
unsigned int i;
/* Build a mapping for each block to the value used for each
register. */
- then_vals = pointer_map_create ();
- else_vals = pointer_map_create ();
+ hash_map<rtx, rtx> then_vals;
+ hash_map<rtx, rtx> else_vals;
/* Make sure the blocks are suitable. */
- if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
+ if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
|| (else_bb
- && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
+ && !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
goto done;
/* Make sure the blocks can be used together. If the same register
c = 0;
FOR_EACH_VEC_ELT (then_regs, i, reg)
{
- void **then_slot = pointer_map_contains (then_vals, reg);
- void **else_slot = pointer_map_contains (else_vals, reg);
+ rtx *then_slot = then_vals.get (reg);
+ rtx *else_slot = else_vals.get (reg);
gcc_checking_assert (then_slot);
if (!else_slot)
++c;
else
{
- rtx then_val = (rtx) *then_slot;
- rtx else_val = (rtx) *else_slot;
+ rtx then_val = *then_slot;
+ rtx else_val = *else_slot;
if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
&& !rtx_equal_p (then_val, else_val))
goto done;
/* Finish off c for MAX_CONDITIONAL_EXECUTE. */
FOR_EACH_VEC_ELT (else_regs, i, reg)
{
- gcc_checking_assert (pointer_map_contains (else_vals, reg));
- if (!pointer_map_contains (then_vals, reg))
+ gcc_checking_assert (else_vals.get (reg));
+ if (!then_vals.get (reg))
++c;
}
then do anything left in the else blocks. */
start_sequence ();
if (!cond_move_convert_if_block (if_info, then_bb, cond,
- then_vals, else_vals, false)
+ &then_vals, &else_vals, false)
|| (else_bb
&& !cond_move_convert_if_block (if_info, else_bb, cond,
- then_vals, else_vals, true)))
+ &then_vals, &else_vals, true)))
{
end_sequence ();
goto done;
success_p = TRUE;
done:
- pointer_map_destroy (then_vals);
- pointer_map_destroy (else_vals);
then_regs.release ();
else_regs.release ();
return success_p;
hash_scc (struct output_block *ob, unsigned first, unsigned size);
unsigned int next_dfs_num;
- struct pointer_map_t *sccstate;
+ hash_map<tree, sccs *> sccstate;
struct obstack sccstate_obstack;
};
bool single_p)
{
sccstack.create (0);
- sccstate = pointer_map_create ();
gcc_obstack_init (&sccstate_obstack);
next_dfs_num = 1;
DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
DFS::~DFS ()
{
sccstack.release ();
- pointer_map_destroy (sccstate);
obstack_free (&sccstate_obstack, NULL);
}
tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
unsigned ix;
- sccs **slot;
/* Handle special cases. */
if (expr == NULL_TREE)
if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
return;
- slot = (sccs **)pointer_map_insert (sccstate, expr);
+ sccs **slot = &sccstate.get_or_insert (expr);
sccs *cstate = *slot;
if (!cstate)
{
struct gcov_ctr_summary GTY((skip)) profile_info;
/* Map assigning declarations their resolutions. */
- pointer_map_t * GTY((skip)) resolution_map;
+ hash_map<tree, ld_plugin_symbol_resolution> * GTY((skip)) resolution_map;
};
typedef struct lto_file_decl_data *lto_file_decl_data_ptr;
+2014-08-07 Trevor Saunders <tsaunders@mozilla.com>
+
+ * lto-partition.c, lto.c: Use hash_map instead of pointer_map.
+
2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
* lto-partition.c, lto-partition.h: Use hash_set instead of
{
symtab_node *node;
struct lto_file_decl_data *file_data;
- struct pointer_map_t *pmap;
+ hash_map<lto_file_decl_data *, ltrans_partition> pmap;
ltrans_partition partition;
- void **slot;
int npartitions = 0;
- pmap = pointer_map_create ();
-
FOR_EACH_SYMBOL (node)
{
if (node->get_partitioning_class () != SYMBOL_PARTITION
if (file_data)
{
- slot = pointer_map_contains (pmap, file_data);
- if (slot)
- partition = (ltrans_partition) *slot;
+ ltrans_partition *slot = &pmap.get_or_insert (file_data);
+ if (*slot)
+ partition = *slot;
else
{
partition = new_partition (file_data->file_name);
- slot = pointer_map_insert (pmap, file_data);
*slot = partition;
npartitions++;
}
else
{
partition = new_partition ("");
- slot = pointer_map_insert (pmap, NULL);
- *slot = partition;
+ pmap.put (NULL, partition);
npartitions++;
}
if (!npartitions)
new_partition ("empty");
- pointer_map_destroy (pmap);
-
}
/* Maximal partitioning. Put every new symbol into a new partition if possible. */
if (resolution == LDPR_UNKNOWN)
return;
if (!file_data->resolution_map)
- file_data->resolution_map = pointer_map_create ();
- *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
+ file_data->resolution_map
+ = new hash_map<tree, ld_plugin_symbol_resolution>;
+ file_data->resolution_map->put (decl, resolution);
}
/* Register DECL with the global symbol table and change its
FILE *resolution;
int count = 0;
struct lto_file_decl_data **decl_data;
- void **res;
symtab_node *snode;
init_cgraph ();
/* Store resolutions into the symbol table. */
+ ld_plugin_symbol_resolution_t *res;
FOR_EACH_SYMBOL (snode)
if (snode->real_symbol_p ()
&& snode->lto_file_data
&& snode->lto_file_data->resolution_map
- && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
- snode->decl)))
- snode->resolution
- = (enum ld_plugin_symbol_resolution)(size_t)*res;
+ && (res = snode->lto_file_data->resolution_map->get (snode->decl)))
+ snode->resolution = *res;
for (i = 0; all_file_decl_data[i]; i++)
if (all_file_decl_data[i]->resolution_map)
{
- pointer_map_destroy (all_file_decl_data[i]->resolution_map);
+ delete all_file_decl_data[i]->resolution_map;
all_file_decl_data[i]->resolution_map = NULL;
}
void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
- struct pointer_map_t **cache ATTRIBUTE_UNUSED)
+ hash_map<tree, name_expansion *> **cache)
{
unsigned i;
aff_tree to_add, current, curre;
tree e, rhs;
gimple def;
widest_int scale;
- void **slot;
struct name_expansion *exp;
aff_combination_zero (&to_add, comb->type);
continue;
if (!*cache)
- *cache = pointer_map_create ();
- slot = pointer_map_insert (*cache, e);
- exp = (struct name_expansion *) *slot;
+ *cache = new hash_map<tree, name_expansion *>;
+ name_expansion **slot = &(*cache)->get_or_insert (e);
+ exp = *slot;
if (!exp)
{
void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
- struct pointer_map_t **cache)
+ hash_map<tree, name_expansion *> **cache)
{
tree_to_aff_combination (expr, type, comb);
aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE. Callback for
- pointer_map_traverse. */
+ hash_map::traverse. */
-static bool
-free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
- void *data ATTRIBUTE_UNUSED)
+bool
+free_name_expansion (tree const &, name_expansion **value, void *)
{
- struct name_expansion *const exp = (struct name_expansion *) *value;
-
- free (exp);
+ free (*value);
return true;
}
tree_to_aff_combination_expand. */
void
-free_affine_expand_cache (struct pointer_map_t **cache)
+free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
if (!*cache)
return;
- pointer_map_traverse (*cache, free_name_expansion, NULL);
- pointer_map_destroy (*cache);
+ (*cache)->traverse<void *, free_name_expansion> (NULL);
+ delete (*cache);
*cache = NULL;
}
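hash_map::traverse takes its callback as a template argument, which is why
the call above names free_name_expansion inside the angle brackets, and
presumably why the function loses its static: a function used as a
non-type template argument needed external linkage under the C++98 rules
GCC was built with at the time.  The callback receives the key by const
reference, a pointer to the mapped value, and the user argument.  A sketch
of the shape, with an illustrative callback; returning true is assumed to
keep the walk going and false to stop it early:

    bool
    visit (tree const &, name_expansion **value, void *)
    {
      /* Inspect or free *VALUE here.  */
      return true;
    }

    /* CACHE is a hash_map<tree, name_expansion *> *.  */
    cache->traverse<void *, visit> (NULL);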
/* Affine combination of trees. We keep track of at most MAX_AFF_ELTS elements
to make things simpler; this is sufficient in most cases. */
+#include "hash-map.h"
#include "wide-int.h"
#define MAX_AFF_ELTS 8
tree rest;
};
+struct name_expansion;
+
widest_int wide_int_ext_for_comb (const widest_int &, aff_tree *);
void aff_combination_const (aff_tree *, tree, const widest_int &);
void aff_combination_elt (aff_tree *, tree, tree);
tree aff_combination_to_tree (aff_tree *);
void unshare_aff_combination (aff_tree *);
bool aff_combination_constant_multiple_p (aff_tree *, aff_tree *, widest_int *);
-void aff_combination_expand (aff_tree *, struct pointer_map_t **);
+void aff_combination_expand (aff_tree *, hash_map<tree, name_expansion *> **);
void tree_to_aff_combination_expand (tree, tree, aff_tree *,
- struct pointer_map_t **);
+ hash_map<tree, name_expansion *> **);
tree get_inner_reference_aff (tree, aff_tree *, widest_int *);
-void free_affine_expand_cache (struct pointer_map_t **);
+void free_affine_expand_cache (hash_map<tree, name_expansion *> **);
bool aff_comb_cannot_overlap_p (aff_tree *, const widest_int &,
const widest_int &);
/* Cache used by tree_to_aff_combination_expand. */
-static struct pointer_map_t *name_expansions;
+static hash_map<tree, name_expansion *> *name_expansions;
/* Dumps data reference REF to FILE. */
{
aff_tree aff1, aff2;
tree ev, left, right, type, step_val;
- pointer_map_t *peeled_chrec_map = NULL;
+ hash_map<tree, name_expansion *> *peeled_chrec_map = NULL;
ev = instantiate_parameters (loop, analyze_scalar_evolution (loop, arg));
if (ev == NULL_TREE || TREE_CODE (ev) != POLYNOMIAL_CHREC)
vec<bitmap_head> all_refs_stored_in_loop;
/* Cache for expanding memory addresses. */
- struct pointer_map_t *ttae_cache;
+ hash_map<tree, name_expansion *> *ttae_cache;
} memory_accesses;
/* Obstack for the bitmaps in the above data structures. */
static bool
mem_refs_may_alias_p (mem_ref_p mem1, mem_ref_p mem2,
- struct pointer_map_t **ttae_cache)
+ hash_map<tree, name_expansion *> **ttae_cache)
{
/* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
object and their offset differ in such a way that the locations cannot
static void
discover_iteration_bound_by_body_walk (struct loop *loop)
{
- pointer_map_t *bb_bounds;
struct nb_iter_bound *elt;
vec<widest_int> bounds = vNULL;
vec<vec<basic_block> > queues = vNULL;
vec<basic_block> queue = vNULL;
ptrdiff_t queue_index;
ptrdiff_t latch_index = 0;
- pointer_map_t *block_priority;
/* Discover what bounds may interest us. */
for (elt = loop->bounds; elt; elt = elt->next)
/* For every basic block record the lowest bound that is guaranteed to
terminate the loop. */
- bb_bounds = pointer_map_create ();
+ hash_map<basic_block, ptrdiff_t> bb_bounds;
for (elt = loop->bounds; elt; elt = elt->next)
{
widest_int bound = elt->bound;
|| wi::ltu_p (bound, loop->nb_iterations_upper_bound))
{
ptrdiff_t index = bound_index (bounds, bound);
- void **entry = pointer_map_contains (bb_bounds,
- gimple_bb (elt->stmt));
+ ptrdiff_t *entry = bb_bounds.get (gimple_bb (elt->stmt));
if (!entry)
- *pointer_map_insert (bb_bounds,
- gimple_bb (elt->stmt)) = (void *)index;
+ bb_bounds.put (gimple_bb (elt->stmt), index);
else if ((ptrdiff_t)*entry > index)
- *entry = (void *)index;
+ *entry = index;
}
}
- block_priority = pointer_map_create ();
+ hash_map<basic_block, ptrdiff_t> block_priority;
/* Perform shortest path discovery loop->header ... loop->latch.
queues.safe_grow_cleared (queue_index + 1);
queue.safe_push (loop->header);
queues[queue_index] = queue;
- *pointer_map_insert (block_priority, loop->header) = (void *)queue_index;
+ block_priority.put (loop->header, queue_index);
for (; queue_index >= 0; queue_index--)
{
{
basic_block bb;
ptrdiff_t bound_index = queue_index;
- void **entry;
edge e;
edge_iterator ei;
bb = queue.pop ();
/* OK, we later inserted the BB with lower priority, skip it. */
- if ((ptrdiff_t)*pointer_map_contains (block_priority, bb) > queue_index)
+ if (*block_priority.get (bb) > queue_index)
continue;
/* See if we can improve the bound. */
- entry = pointer_map_contains (bb_bounds, bb);
- if (entry && (ptrdiff_t)*entry < bound_index)
- bound_index = (ptrdiff_t)*entry;
+ ptrdiff_t *entry = bb_bounds.get (bb);
+ if (entry && *entry < bound_index)
+ bound_index = *entry;
/* Insert successors into the queue, watch for the latch edge
and record the greatest index we saw. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
bool insert = false;
- void **entry;
if (loop_exit_edge_p (loop, e))
continue;
if (e == loop_latch_edge (loop)
&& latch_index < bound_index)
latch_index = bound_index;
- else if (!(entry = pointer_map_contains (block_priority, e->dest)))
+ else if (!(entry = block_priority.get (e->dest)))
{
insert = true;
- *pointer_map_insert (block_priority, e->dest) = (void *)bound_index;
+ block_priority.put (e->dest, bound_index);
}
- else if ((ptrdiff_t)*entry < bound_index)
+ else if (*entry < bound_index)
{
insert = true;
- *entry = (void *)bound_index;
+ *entry = bound_index;
}
if (insert)
queues.release ();
bounds.release ();
- pointer_map_destroy (bb_bounds);
- pointer_map_destroy (block_priority);
}
/* See if every path crossing the loop goes through a statement that is known
cfun->gimple_df->default_defs = NULL;
pt_solution_reset (&cfun->gimple_df->escaped);
if (cfun->gimple_df->decls_to_pointers != NULL)
- pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
+ delete cfun->gimple_df->decls_to_pointers;
cfun->gimple_df->decls_to_pointers = NULL;
cfun->gimple_df->modified_noreturn_calls = NULL;
cfun->gimple_df = NULL;
return true;
}
-static pointer_map_t *cgraph_node_map = 0;
+struct profile_id_traits : default_hashmap_traits
+{
+ template<typename T>
+ static bool
+ is_deleted (T &e)
+ {
+ return e.m_key == UINT_MAX;
+ }
+
+ template<typename T> static bool is_empty (T &e) { return e.m_key == 0; }
+ template<typename T> static void mark_deleted (T &e) { e.m_key = UINT_MAX; }
+ template<typename T> static void mark_empty (T &e) { e.m_key = 0; }
+};
+
+static hash_map<unsigned int, cgraph_node *, profile_id_traits> *
+cgraph_node_map = 0;
/* Returns true if node graph is initialized. This
is used to test if profile_id has been created
init_node_map (bool local)
{
struct cgraph_node *n;
- cgraph_node_map = pointer_map_create ();
+ cgraph_node_map
+ = new hash_map<unsigned int, cgraph_node *, profile_id_traits>;
FOR_EACH_DEFINED_FUNCTION (n)
if (n->has_gimple_body_p ())
{
- void **val;
+ cgraph_node **val;
if (local)
{
n->profile_id = coverage_compute_profile_id (n);
- while ((val = pointer_map_contains (cgraph_node_map,
- (void *)(size_t)n->profile_id))
+ while ((val = cgraph_node_map->get (n->profile_id))
|| !n->profile_id)
{
if (dump_file)
n->profile_id,
n->name (),
n->order,
- (*(symtab_node **)val)->name (),
- (*(symtab_node **)val)->order);
+ (*val)->name (),
+ (*val)->order);
n->profile_id = (n->profile_id + 1) & 0x7fffffff;
}
}
n->order);
continue;
}
- else if ((val = pointer_map_contains (cgraph_node_map,
- (void *)(size_t)n->profile_id)))
+ else if ((val = cgraph_node_map->get (n->profile_id)))
{
if (dump_file)
fprintf (dump_file,
*val = NULL;
continue;
}
- *pointer_map_insert (cgraph_node_map,
- (void *)(size_t)n->profile_id) = (void *)n;
+ cgraph_node_map->put (n->profile_id, n);
}
}
void
del_node_map (void)
{
- pointer_map_destroy (cgraph_node_map);
+ delete cgraph_node_map;
}
/* Return the cgraph node for the function with the given profile id. */
struct cgraph_node*
find_func_by_profile_id (int profile_id)
{
- void **val = pointer_map_contains (cgraph_node_map,
- (void *)(size_t)profile_id);
+ cgraph_node **val = cgraph_node_map->get (profile_id);
if (val)
- return (struct cgraph_node *)*val;
+ return *val;
else
return NULL;
}