+2012-10-29 Lawrence Crowl <crowl@google.com>
+
+ * sbitmap.h (sbitmap_copy): Rename bitmap_copy.
+ (sbitmap_copy_n): Rename bitmap_copy_n.
+ (sbitmap_equal): Rename bitmap_equal_p.
+ (sbitmap_empty_p): Rename bitmap_empty_p.
+ (sbitmap_range_empty_p): Rename bitmap_range_empty_p.
+ (sbitmap_zero): Rename bitmap_clear.
+ (sbitmap_ones): Rename bitmap_ones.
+ (sbitmap_vector_zero): Rename bitmap_vector_clear.
+ (sbitmap_vector_ones): Rename bitmap_vector_ones.
+ (sbitmap_not): Rename bitmap_not.
+ (sbitmap_a_and_b_cg): Commented out.
+ (sbitmap_a_and_b): Rename bitmap_and. Add bool return.
+ (sbitmap_difference): Rename bitmap_and_compl.
+ (sbitmap_a_or_b_cg): Commented out.
+	(sbitmap_a_or_b): Rename bitmap_ior.  Add bool return.
+ (sbitmap_a_xor_b_cg): Commented out.
+ (sbitmap_a_xor_b): Rename bitmap_xor. Add bool return.
+ (sbitmap_a_and_b_or_c_cg): Rename bitmap_and_or.
+ (sbitmap_a_and_b_or_c): Commented out.
+ (sbitmap_a_or_b_and_c_cg): Rename bitmap_or_and.
+ (sbitmap_a_or_b_and_c): Commented out.
+ (sbitmap_union_of_diff_cg): Rename bitmap_ior_and_compl.
+ (sbitmap_union_of_diff): Commented out.
+ (dump_sbitmap): Rename dump_bitmap.
+ (dump_sbitmap_file): Rename dump_bitmap_file.
+ (debug_sbitmap): Rename debug_bitmap.
+ (dump_sbitmap_vector): Rename dump_bitmap_vector.
+ (sbitmap_first_set_bit): Rename bitmap_first_set_bit.
+ (sbitmap_last_set_bit): Rename bitmap_last_set_bit.
+ (sbitmap_a_subset_b_p): Rename bitmap_subset_p.
+ (sbitmap_any_common_bits): Rename bitmap_intersect_p.
+ (#define sbitmap_free): Reimplement as inline function.
+ (#define sbitmap_vector_free): Reimplement as inline function.
+ * bitmap.h (#define bitmap_zero): Remove as redundant.
+ (#define bitmap_empty_p): Reimplement as inline function.
+ (#define dump_bitmap): Reimplement as inline function.
+
2012-10-29 Jonathan Wakely <jwakely.gcc@gmail.com>
PR c++/54930
memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
/* Wipe the reg_seen array clean. */
- sbitmap_zero (reg_seen);
+ bitmap_clear (reg_seen);
/* Mark all hard registers which may contain an address.
The stack, frame and argument pointers may contain an address.
extern bool bitmap_intersect_compl_p (const_bitmap, const_bitmap);
/* True if MAP is an empty bitmap. */
-#define bitmap_empty_p(MAP) (!(MAP)->first)
+inline bool bitmap_empty_p (const_bitmap map)
+{
+ return !map->first;
+}
/* True if the bitmap has only a single bit set. */
extern bool bitmap_single_bit_set_p (const_bitmap);
extern void bitmap_obstack_free (bitmap);
/* A few compatibility/functions macros for compatibility with sbitmaps */
-#define dump_bitmap(file, bitmap) bitmap_print (file, bitmap, "", "\n")
-#define bitmap_zero(a) bitmap_clear (a)
+inline void dump_bitmap (FILE *file, const_bitmap map)
+{
+ bitmap_print (file, map, "", "\n");
+}
+
extern unsigned bitmap_first_set_bit (const_bitmap);
extern unsigned bitmap_last_set_bit (const_bitmap);
note_other_use_this_block (regno, info->users_this_bb);
SET_HARD_REG_BIT (info->btrs_written_in_block, regno);
SET_HARD_REG_BIT (info->btrs_live_in_block, regno);
- sbitmap_difference (info->bb_gen, info->bb_gen,
+ bitmap_and_compl (info->bb_gen, info->bb_gen,
info->btr_defset[regno - first_btr]);
}
}
btr_def_group all_btr_def_groups = NULL;
defs_uses_info info;
- sbitmap_vector_zero (bb_gen, last_basic_block);
+ bitmap_vector_clear (bb_gen, last_basic_block);
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
basic_block bb = BASIC_BLOCK (i);
def_array[insn_uid] = def;
SET_HARD_REG_BIT (info.btrs_written_in_block, regno);
SET_HARD_REG_BIT (info.btrs_live_in_block, regno);
- sbitmap_difference (bb_gen[i], bb_gen[i],
+ bitmap_and_compl (bb_gen[i], bb_gen[i],
btr_defset[regno - first_btr]);
SET_BIT (bb_gen[i], insn_uid);
def->next_this_bb = defs_this_bb;
user->other_use_this_block = 1;
IOR_HARD_REG_SET (info.btrs_written_in_block, all_btrs);
IOR_HARD_REG_SET (info.btrs_live_in_block, all_btrs);
- sbitmap_zero (info.bb_gen);
+ bitmap_clear (info.bb_gen);
}
else
{
/* For each basic block, form the set BB_KILL - the set
of definitions that the block kills. */
- sbitmap_vector_zero (bb_kill, last_basic_block);
+ bitmap_vector_clear (bb_kill, last_basic_block);
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
for (regno = first_btr; regno <= last_btr; regno++)
if (TEST_HARD_REG_BIT (all_btrs, regno)
&& TEST_HARD_REG_BIT (btrs_written[i], regno))
- sbitmap_a_or_b (bb_kill[i], bb_kill[i],
+ bitmap_ior (bb_kill[i], bb_kill[i],
btr_defset[regno - first_btr]);
}
}
sbitmap bb_in = sbitmap_alloc (max_uid);
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
- sbitmap_copy (bb_out[i], bb_gen[i]);
+ bitmap_copy (bb_out[i], bb_gen[i]);
changed = 1;
while (changed)
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
sbitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK (i));
- changed |= sbitmap_union_of_diff_cg (bb_out[i], bb_gen[i],
+ changed |= bitmap_ior_and_compl (bb_out[i], bb_gen[i],
bb_in, bb_kill[i]);
}
}
{
/* Remove all reaching defs of regno except
for this one. */
- sbitmap_difference (reaching_defs, reaching_defs,
+ bitmap_and_compl (reaching_defs, reaching_defs,
btr_defset[def->btr - first_btr]);
SET_BIT(reaching_defs, insn_uid);
}
sbitmap_iterator sbi;
if (user->use)
- sbitmap_a_and_b (
+ bitmap_and (
reaching_defs_of_reg,
reaching_defs,
btr_defset[REGNO (user->use) - first_btr]);
{
int reg;
- sbitmap_zero (reaching_defs_of_reg);
+ bitmap_clear (reaching_defs_of_reg);
for (reg = first_btr; reg <= last_btr; reg++)
if (TEST_HARD_REG_BIT (all_btrs, reg)
&& refers_to_regno_p (reg, reg + 1, user->insn,
NULL))
- sbitmap_a_or_b_and_c (reaching_defs_of_reg,
+ bitmap_or_and (reaching_defs_of_reg,
reaching_defs_of_reg,
reaching_defs,
btr_defset[reg - first_btr]);
for (regno = first_btr; regno <= last_btr; regno++)
if (TEST_HARD_REG_BIT (all_btrs, regno)
&& TEST_HARD_REG_BIT (call_used_reg_set, regno))
- sbitmap_difference (reaching_defs, reaching_defs,
+ bitmap_and_compl (reaching_defs, reaching_defs,
btr_defset[regno - first_btr]);
}
}
sbitmap *bb_kill;
sbitmap *bb_out;
- sbitmap_vector_zero (btr_defset, (last_btr - first_btr) + 1);
+ bitmap_vector_clear (btr_defset, (last_btr - first_btr) + 1);
compute_defs_uses_and_gen (all_btr_defs, def_array, use_array, btr_defset,
bb_gen, btrs_written);
HOST_WIDE_INT i;
rtx val;
- if (sbitmap_empty_p (stored_args_map))
+ if (bitmap_empty_p (stored_args_map))
return false;
val = internal_arg_pointer_based_exp (addr, true);
if (val == NULL_RTX)
= plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
#endif
stored_args_map = sbitmap_alloc (args_size.constant);
- sbitmap_zero (stored_args_map);
+ bitmap_clear (stored_args_map);
}
/* If we have no actual push instructions, or shouldn't use them,
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
- sbitmap_zero (visited);
+ bitmap_clear (visited);
/* Push the first edge on to the stack. */
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
- sbitmap_zero (visited);
+ bitmap_clear (visited);
/* Push the first edge on to the stack. */
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
dfs_find_deadend (basic_block bb)
{
sbitmap visited = sbitmap_alloc (last_basic_block);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
for (;;)
{
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
- sbitmap_zero (visited);
+ bitmap_clear (visited);
/* Put all blocks that have no successor into the initial work list. */
FOR_ALL_BB (bb)
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
- sbitmap_zero (visited);
+ bitmap_clear (visited);
/* Push the first edge on to the stack. */
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
data->visited_blocks = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
- sbitmap_zero (data->visited_blocks);
+ bitmap_clear (data->visited_blocks);
return;
}
{
visited = sbitmap_alloc (size);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
v_size = size;
}
else if (v_size < size)
if (e->dest == EXIT_BLOCK_PTR)
continue;
- sbitmap_copy (dst, src[e->dest->index]);
+ bitmap_copy (dst, src[e->dest->index]);
break;
}
if (e == 0)
- sbitmap_ones (dst);
+ bitmap_ones (dst);
else
for (++ix; ix < EDGE_COUNT (b->succs); ix++)
{
if (e->src == ENTRY_BLOCK_PTR)
continue;
- sbitmap_copy (dst, src[e->src->index]);
+ bitmap_copy (dst, src[e->src->index]);
break;
}
if (e == 0)
- sbitmap_ones (dst);
+ bitmap_ones (dst);
else
for (++ix; ix < EDGE_COUNT (b->preds); ix++)
{
if (e->dest == EXIT_BLOCK_PTR)
continue;
- sbitmap_copy (dst, src[e->dest->index]);
+ bitmap_copy (dst, src[e->dest->index]);
break;
}
if (ix == EDGE_COUNT (b->succs))
- sbitmap_zero (dst);
+ bitmap_clear (dst);
else
for (ix++; ix < EDGE_COUNT (b->succs); ix++)
{
if (e->src== ENTRY_BLOCK_PTR)
continue;
- sbitmap_copy (dst, src[e->src->index]);
+ bitmap_copy (dst, src[e->src->index]);
break;
}
if (ix == EDGE_COUNT (b->preds))
- sbitmap_zero (dst);
+ bitmap_clear (dst);
else
for (ix++; ix < EDGE_COUNT (b->preds); ix++)
{
/* If we have an edge cache, cache edges going out of BB. */
if (edge_cache)
{
- sbitmap_zero (edge_cache);
+ bitmap_clear (edge_cache);
if (update_p)
{
FOR_EACH_EDGE (e, ei, bb->succs)
}
if (edge_cache)
- sbitmap_vector_free (edge_cache);
+ sbitmap_free (edge_cache);
}
\f
static void
}
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_ones (blocks);
+ bitmap_ones (blocks);
find_many_sub_basic_blocks (blocks);
sbitmap_free (blocks);
purge_all_dead_edges ();
/* Count the number of loop headers. This should be the
same as the number of natural loops. */
headers = sbitmap_alloc (last_basic_block);
- sbitmap_zero (headers);
+ bitmap_clear (headers);
num_loops = 0;
FOR_EACH_BB (header)
}
free (bbs);
}
- sbitmap_zero (visited);
+ bitmap_clear (visited);
FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
{
bbs = get_loop_body (loop);
return;
in_queue = sbitmap_alloc (last_basic_block);
- sbitmap_zero (in_queue);
+ bitmap_clear (in_queue);
SET_BIT (in_queue, from->index);
/* Prevent us from going out of the base_loop. */
SET_BIT (in_queue, base_loop->header->index);
n_bord_bbs = 0;
bord_bbs = XNEWVEC (basic_block, n_basic_blocks);
seen = sbitmap_alloc (last_basic_block);
- sbitmap_zero (seen);
+ bitmap_clear (seen);
/* Find "border" hexes -- i.e. those with predecessor in removed path. */
for (i = 0; i < nrem; i++)
free (rem_bbs);
/* Find blocks whose dominators may be affected. */
- sbitmap_zero (seen);
+ bitmap_clear (seen);
for (i = 0; i < n_bord_bbs; i++)
{
basic_block ldom;
unsigned i;
seen = sbitmap_alloc (last_basic_block);
- sbitmap_zero (seen);
+ bitmap_clear (seen);
body = get_loop_body (loop);
for (i = 0; i < loop->num_nodes; i++)
basic_block bb;
superblocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (superblocks);
+ bitmap_clear (superblocks);
FOR_EACH_BB (bb)
if (bb->flags & BB_SUPERBLOCK)
todo = VEC_alloc (basic_block, heap, last_basic_block);
pushed = sbitmap_alloc (last_basic_block);
- sbitmap_zero (pushed);
+ bitmap_clear (pushed);
if (!finalize_fp_sets)
{
df_note_add_problem ();
visited = sbitmap_alloc (last_basic_block);
in_worklist = sbitmap_alloc (last_basic_block);
in_pending = sbitmap_alloc (last_basic_block);
- sbitmap_zero (in_worklist);
+ bitmap_clear (in_worklist);
/* Don't check outgoing edges of entry point. */
- sbitmap_ones (in_pending);
+ bitmap_ones (in_pending);
FOR_EACH_BB (bb)
if (BLOCK_INFO (bb)->processed)
RESET_BIT (in_pending, bb->index);
in_pending = in_worklist;
in_worklist = sbitmap_swap;
- sbitmap_zero (visited);
+ bitmap_clear (visited);
cfun->machine->rescan_vzeroupper_p = 0;
n_ready, 0);
data->ready_try_change_size = n_ready;
}
- sbitmap_zero (data->ready_try_change);
+ bitmap_clear (data->ready_try_change);
/* Filter out insns from ready_try that the core will not be able to issue
on current cycle due to decoder. */
unsigned int i = 0;
sbitmap_iterator sbi;
- gcc_assert (sbitmap_last_set_bit (data->ready_try_change) < n_ready);
+ gcc_assert (bitmap_last_set_bit (data->ready_try_change) < n_ready);
EXECUTE_IF_SET_IN_SBITMAP (data->ready_try_change, 0, i, sbi)
{
ready_try[i] = 0;
/* Bit X of PROTECTED_BBS is set if the last operation in basic block
X is protected by a cache barrier. */
protected_bbs = sbitmap_alloc (last_basic_block);
- sbitmap_zero (protected_bbs);
+ bitmap_clear (protected_bbs);
/* Iterate over the basic blocks in reverse post-order. */
rev_post_order = XNEWVEC (int, last_basic_block);
}
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (blocks);
+ bitmap_clear (blocks);
in_spu_reorg = 1;
compute_bb_for_insn ();
}
free (spu_bb_info);
- if (!sbitmap_empty_p (blocks))
+ if (!bitmap_empty_p (blocks))
find_many_sub_basic_blocks (blocks);
/* We have to schedule to make sure alignment is ok. */
unsigned int i;
/* Initialize the bitmaps that were passed in. */
- sbitmap_vector_zero (kill, last_basic_block);
- sbitmap_vector_zero (comp, last_basic_block);
+ bitmap_vector_clear (kill, last_basic_block);
+ bitmap_vector_clear (comp, last_basic_block);
for (i = 0; i < table->size; i++)
{
/* Set up the table of already visited basic blocks. */
cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (cse_visited_basic_blocks);
+ bitmap_clear (cse_visited_basic_blocks);
/* Loop over basic blocks in reverse completion order (RPO),
excluding the ENTRY and EXIT blocks. */
can_alter_cfg = true;
marked = sbitmap_alloc (get_max_uid () + 1);
- sbitmap_zero (marked);
+ bitmap_clear (marked);
}
/* So something was deleted that requires a redo. Do it on
the cheap. */
delete_unmarked_insns ();
- sbitmap_zero (marked);
+ bitmap_clear (marked);
bitmap_clear (processed);
bitmap_clear (redo_out);
g->nodes[i].cuid = i;
g->nodes[i].successors = sbitmap_alloc (num_nodes);
- sbitmap_zero (g->nodes[i].successors);
+ bitmap_clear (g->nodes[i].successors);
g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
- sbitmap_zero (g->nodes[i].predecessors);
+ bitmap_clear (g->nodes[i].predecessors);
g->nodes[i].first_note = (first_note ? first_note : insn);
g->nodes[i++].insn = insn;
first_note = NULL_RTX;
scc->backarcs = NULL;
scc->num_backarcs = 0;
scc->nodes = sbitmap_alloc (g->num_nodes);
- sbitmap_copy (scc->nodes, nodes);
+ bitmap_copy (scc->nodes, nodes);
/* Mark the backarcs that belong to this SCC. */
EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, u, sbi)
EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
{
const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
- sbitmap_a_or_b (succ, succ, node_succ);
+ bitmap_ior (succ, succ, node_succ);
};
/* We want those that are not in ops. */
- sbitmap_difference (succ, succ, ops);
+ bitmap_and_compl (succ, succ, ops);
}
/* Given a set OPS of nodes in the DDG, find the set of their predecessors
EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
{
const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
- sbitmap_a_or_b (preds, preds, node_preds);
+ bitmap_ior (preds, preds, node_preds);
};
/* We want those that are not in ops. */
- sbitmap_difference (preds, preds, ops);
+ bitmap_and_compl (preds, preds, ops);
}
int i = 0;
sbitmap tmp = sbitmap_alloc (num_nodes);
- sbitmap_zero (tmp);
+ bitmap_clear (tmp);
for (i = 0; i < sccs->num_sccs; i++)
{
- gcc_assert (!sbitmap_empty_p (sccs->sccs[i]->nodes));
+ gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes));
/* Verify that every node in sccs is in exactly one strongly
connected component. */
- gcc_assert (!sbitmap_any_common_bits (tmp, sccs->sccs[i]->nodes));
- sbitmap_a_or_b (tmp, tmp, sccs->sccs[i]->nodes);
+ gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
+ bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
}
sbitmap_free (tmp);
}
if (backarc->aux.count == IN_SCC)
continue;
- sbitmap_zero (scc_nodes);
- sbitmap_zero (from);
- sbitmap_zero (to);
+ bitmap_clear (scc_nodes);
+ bitmap_clear (from);
+ bitmap_clear (to);
SET_BIT (from, dest->cuid);
SET_BIT (to, src->cuid);
sbitmap reach_to = sbitmap_alloc (num_nodes);
sbitmap tmp = sbitmap_alloc (num_nodes);
- sbitmap_copy (reachable_from, from);
- sbitmap_copy (tmp, from);
+ bitmap_copy (reachable_from, from);
+ bitmap_copy (tmp, from);
change = 1;
while (change)
{
change = 0;
- sbitmap_copy (workset, tmp);
- sbitmap_zero (tmp);
+ bitmap_copy (workset, tmp);
+ bitmap_clear (tmp);
EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
{
ddg_edge_ptr e;
}
}
- sbitmap_copy (reach_to, to);
- sbitmap_copy (tmp, to);
+ bitmap_copy (reach_to, to);
+ bitmap_copy (tmp, to);
change = 1;
while (change)
{
change = 0;
- sbitmap_copy (workset, tmp);
- sbitmap_zero (tmp);
+ bitmap_copy (workset, tmp);
+ bitmap_clear (tmp);
EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
{
ddg_edge_ptr e;
}
}
- answer = sbitmap_a_and_b_cg (result, reachable_from, reach_to);
+ answer = bitmap_and (result, reachable_from, reach_to);
sbitmap_free (workset);
sbitmap_free (reachable_from);
sbitmap_free (reach_to);
g->nodes[i].aux.count = -1;
g->nodes[src].aux.count = 0;
- sbitmap_zero (tmp);
+ bitmap_clear (tmp);
SET_BIT (tmp, src);
while (change)
sbitmap_iterator sbi;
change = 0;
- sbitmap_copy (workset, tmp);
- sbitmap_zero (tmp);
+ bitmap_copy (workset, tmp);
+ bitmap_clear (tmp);
EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
{
ddg_node_ptr u_node = &g->nodes[u];
bbindex_to_postorder[i] = last_basic_block;
/* Initialize the considered map. */
- sbitmap_zero (considered);
+ bitmap_clear (considered);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_consider, 0, index, bi)
{
SET_BIT (considered, index);
basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks * 2);
int sp = 0;
sbitmap visited = sbitmap_alloc (last_basic_block + 1);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
SET_BIT (visited, ENTRY_BLOCK_PTR->index);
while (true)
bitmap all_ones = NULL;
unsigned int i;
- sbitmap_ones (unreachable_blocks);
+ bitmap_ones (unreachable_blocks);
FOR_ALL_BB (bb)
{
/* Find the last set bit in ebitmap MAP. */
int
-ebitmap_last_set_bit (ebitmap map)
+bitmap_last_set_bit (ebitmap map)
{
unsigned int i = 0;
ebitmap_iterator ebi;
/* Clear ebitmap MAP. */
void
-ebitmap_clear (ebitmap map)
+bitmap_clear (ebitmap map)
{
ebitmap_array_clear (map);
- sbitmap_zero (map->wordmask);
+ bitmap_clear (map->wordmask);
map->wordmask = sbitmap_resize (map->wordmask, 1, 0);
map->numwords = 0;
map->cache = NULL;
size = EBITMAP_ELT_BITS;
ebitmap_array_init (ret, (size + EBITMAP_ELT_BITS - 1) / EBITMAP_ELT_BITS);
ret->wordmask = sbitmap_alloc_with_popcount (size);
- sbitmap_zero (ret->wordmask);
+ bitmap_clear (ret->wordmask);
ret->numwords = 0;
ret->cache = NULL;
ret->cacheindex = 0;
/* Clear BIT from ebitmap MAP. */
void
-ebitmap_clear_bit (ebitmap map, unsigned int bit)
+bitmap_clear_bit (ebitmap map, unsigned int bit)
{
unsigned int wordindex = bit / EBITMAP_ELT_BITS;
unsigned int eltwordindex = 0;
/* Set BIT in ebitmap MAP. */
void
-ebitmap_set_bit (ebitmap map, unsigned int bit)
+bitmap_set_bit (ebitmap map, unsigned int bit)
{
unsigned int wordindex = bit / EBITMAP_ELT_BITS;
unsigned int eltwordindex;
/* Return true if MAP contains BIT. */
bool
-ebitmap_bit_p (ebitmap map, unsigned int bit)
+bitmap_bit_p (ebitmap map, unsigned int bit)
{
unsigned int wordindex = bit / EBITMAP_ELT_BITS;
unsigned int bitindex= bit % EBITMAP_ELT_BITS;
/* Copy ebitmap SRC to DST. */
void
-ebitmap_copy (ebitmap dst, ebitmap src)
+bitmap_copy (ebitmap dst, ebitmap src)
{
/* Blow away any existing wordmask, and copy the new one. */
sbitmap_free (dst->wordmask);
dst->wordmask = sbitmap_alloc_with_popcount (src->wordmask->n_bits);
- sbitmap_copy (dst->wordmask, src->wordmask);
+ bitmap_copy (dst->wordmask, src->wordmask);
/* Make sure our destination array is big enough, and then copy the
actual words. */
/* Dump ebitmap BMAP to FILE. */
void
-dump_ebitmap (FILE *file, ebitmap bmap)
+dump_bitmap (FILE *file, ebitmap bmap)
{
unsigned int pos;
unsigned int i;
int res;
unsigned int size;
- res = sbitmap_last_set_bit (bmap->wordmask);
+ res = bitmap_last_set_bit (bmap->wordmask);
if (res == -1)
size = 0;
else
fprintf (file, "n_words = %d, set = {", bmap->numwords);
for (pos = 30, i = 0; i < size; i++)
- if (ebitmap_bit_p (bmap, i))
+ if (bitmap_bit_p (bmap, i))
{
if (pos > 70)
{
/* Dump ebitmap BMAP to stderr. */
DEBUG_FUNCTION void
-debug_ebitmap (ebitmap bmap)
+debug_bitmap (ebitmap bmap)
{
- dump_ebitmap (stderr, bmap);
+ dump_bitmap (stderr, bmap);
}
/* Perform the operation DST &= SRC. */
void
-ebitmap_and_into (ebitmap dst, ebitmap src)
+bitmap_and_into (ebitmap dst, ebitmap src)
{
sbitmap_iterator sbi;
unsigned int i;
/* Short circuit the empty bitmap cases. */
if (src->numwords == 0 || dst->numwords == 0)
{
- ebitmap_clear (dst);
+ bitmap_clear (dst);
return;
}
/* AND the masks, then walk the words that may actually appear in
the result, AND'ing them. */
- sbitmap_a_and_b (dst->wordmask, dst->wordmask, src->wordmask);
+ bitmap_and (dst->wordmask, dst->wordmask, src->wordmask);
EXECUTE_IF_SET_IN_SBITMAP (dst->wordmask, 0, i, sbi)
{
/* Perform the operation DST = SRC1 & SRC2. */
void
-ebitmap_and (ebitmap dst, ebitmap src1, ebitmap src2)
+bitmap_and (ebitmap dst, ebitmap src1, ebitmap src2)
{
sbitmap_iterator sbi;
unsigned int i;
dst->cache = NULL;
if (src1->numwords == 0 || src2->numwords == 0)
{
- ebitmap_clear (dst);
+ bitmap_clear (dst);
return;
}
= sbitmap_resize (dst->wordmask,
MIN (src1->wordmask->n_bits, src2->wordmask->n_bits),
0);
- sbitmap_a_and_b (dst->wordmask, src1->wordmask, src2->wordmask);
+ bitmap_and (dst->wordmask, src1->wordmask, src2->wordmask);
EXECUTE_IF_SET_IN_SBITMAP (dst->wordmask, 0, i, sbi)
{
gcc_assert (dst->elts[i] != 0);
EXECUTE_IF_SET_IN_EBITMAP (src1, 0, i, ebi)
- if (ebitmap_bit_p (src2, i))
- gcc_assert (ebitmap_bit_p (dst, i));
+ if (bitmap_bit_p (src2, i))
+ gcc_assert (bitmap_bit_p (dst, i));
for (i = 0; i < dst->numwords; i++)
gcc_assert (dst->elts[i] != 0);
changed. */
bool
-ebitmap_ior_into (ebitmap dst, ebitmap src)
+bitmap_ior_into (ebitmap dst, ebitmap src)
{
unsigned int dstsize = dst->wordmask->n_bits;
unsigned int srcsize = src->wordmask->n_bits;
unsigned int newarraysize;
#ifdef EBITMAP_DEBUGGING
ebitmap dstcopy = ebitmap_alloc (1);
- ebitmap_copy (dstcopy, dst);
+ bitmap_copy (dstcopy, dst);
#endif
dst->cache = NULL;
if (dst->numwords == 0 && src->numwords != 0)
{
- ebitmap_copy (dst, src);
+ bitmap_copy (dst, src);
return true;
}
else if (src->numwords == 0)
/* We can do without the temp mask if it's faster, but it would mean
touching more words in the actual dense vector. */
tempmask = sbitmap_alloc (MAX (srcsize, dstsize));
- sbitmap_zero (tempmask);
+ bitmap_clear (tempmask);
if (srcsize == dstsize)
{
- sbitmap_a_or_b (tempmask, dst->wordmask, src->wordmask);
+ bitmap_ior (tempmask, dst->wordmask, src->wordmask);
}
else
{
0);
if (srcsize >= dstsize)
{
- sbitmap_copy_n (tempmask, dst->wordmask, dst->wordmask->size);
- sbitmap_a_or_b (tempmask, tempmask, src->wordmask);
+ bitmap_copy_n (tempmask, dst->wordmask, dst->wordmask->size);
+ bitmap_ior (tempmask, tempmask, src->wordmask);
}
else
{
- sbitmap_copy_n (tempmask, src->wordmask, src->wordmask->size);
- sbitmap_a_or_b (tempmask, tempmask, dst->wordmask);
+ bitmap_copy_n (tempmask, src->wordmask, src->wordmask->size);
+ bitmap_ior (tempmask, tempmask, dst->wordmask);
}
}
newarraysize = src->numwords + dst->numwords;
gcc_assert (dst->elts[i] != 0);
EXECUTE_IF_SET_IN_EBITMAP (src, 0, i, ebi)
- gcc_assert (ebitmap_bit_p (dst, i));
+ gcc_assert (bitmap_bit_p (dst, i));
EXECUTE_IF_SET_IN_EBITMAP (dstcopy, 0, i, ebi)
- gcc_assert (ebitmap_bit_p (dst, i));
+ gcc_assert (bitmap_bit_p (dst, i));
sbitmap_verify_popcount (dst->wordmask);
- gcc_assert (changed == !ebitmap_equal_p (dst, dstcopy));
+ gcc_assert (changed == !bitmap_equal_p (dst, dstcopy));
gcc_assert (sbitmap_popcount (dst->wordmask,
dst->wordmask->n_bits) == dst->numwords);
}
in DST has changed. */
bool
-ebitmap_ior (ebitmap dst, ebitmap src1, ebitmap src2)
+bitmap_ior (ebitmap dst, ebitmap src1, ebitmap src2)
{
unsigned int src1size = src1->wordmask->n_bits;
unsigned int src2size = src2->wordmask->n_bits;
unsigned int newarraysize;
#ifdef EBITMAP_DEBUGGING
ebitmap dstcopy = ebitmap_alloc (1);
- ebitmap_copy (dstcopy, dst);
+ bitmap_copy (dstcopy, dst);
#endif
dst->cache = NULL;
tempmask = sbitmap_alloc_with_popcount (MAX (src1size, src2size));
- sbitmap_zero (tempmask);
+ bitmap_clear (tempmask);
if (src1size == src2size)
{
- sbitmap_a_or_b (tempmask, src1->wordmask, src2->wordmask);
+ bitmap_ior (tempmask, src1->wordmask, src2->wordmask);
}
else
{
if (src1size >= src2size)
{
- sbitmap_copy_n (tempmask, src2->wordmask, src2->wordmask->size);
- sbitmap_a_or_b (tempmask, tempmask, src1->wordmask);
+ bitmap_copy_n (tempmask, src2->wordmask, src2->wordmask->size);
+ bitmap_ior (tempmask, tempmask, src1->wordmask);
}
else
{
- sbitmap_copy_n (tempmask, src1->wordmask, src1->wordmask->size);
- sbitmap_a_or_b (tempmask, tempmask, src2->wordmask);
+ bitmap_copy_n (tempmask, src1->wordmask, src1->wordmask->size);
+ bitmap_ior (tempmask, tempmask, src2->wordmask);
}
}
newarraysize = src1->numwords + src2->numwords;
gcc_assert (dst->elts[i] != 0);
EXECUTE_IF_SET_IN_EBITMAP (src1, 0, i, ebi)
- gcc_assert (ebitmap_bit_p (dst, i));
+ gcc_assert (bitmap_bit_p (dst, i));
EXECUTE_IF_SET_IN_EBITMAP (src2, 0, i, ebi)
- gcc_assert (ebitmap_bit_p (dst, i));
+ gcc_assert (bitmap_bit_p (dst, i));
}
sbitmap_verify_popcount (dst->wordmask);
- gcc_assert (changed == !ebitmap_equal_p (dst, dstcopy));
+ gcc_assert (changed == !bitmap_equal_p (dst, dstcopy));
gcc_assert (sbitmap_popcount (dst->wordmask,
dst->wordmask->n_bits) == dst->numwords);
#endif
has changed. */
bool
-ebitmap_and_compl_into (ebitmap dst, ebitmap src)
+bitmap_and_compl_into (ebitmap dst, ebitmap src)
{
bool changed = false;
unsigned int i;
sbitmap_iterator sbi;
#ifdef EBITMAP_DEBUGGING
ebitmap dstcopy = ebitmap_alloc (1);
- ebitmap_copy (dstcopy, dst);
+ bitmap_copy (dstcopy, dst);
#endif
gcc_assert (dst != src);
EXECUTE_IF_SET_IN_EBITMAP (dstcopy, 0, i, ebi)
{
- if (!ebitmap_bit_p (src, i))
- gcc_assert (ebitmap_bit_p (dst, i));
+ if (!bitmap_bit_p (src, i))
+ gcc_assert (bitmap_bit_p (dst, i));
}
for (i = 0; i < dst->numwords; i++)
gcc_assert (sbitmap_popcount (dst->wordmask,
dst->wordmask->n_bits) == neweltindex);
sbitmap_verify_popcount (dst->wordmask);
- gcc_assert (changed == !ebitmap_equal_p (dst, dstcopy));
+ gcc_assert (changed == !bitmap_equal_p (dst, dstcopy));
gcc_assert (sbitmap_popcount (dst->wordmask,
dst->wordmask->n_bits) == dst->numwords);
}
in DST has changed. */
bool
-ebitmap_and_compl (ebitmap dst, ebitmap src1, ebitmap src2)
+bitmap_and_compl (ebitmap dst, ebitmap src1, ebitmap src2)
{
unsigned int src1size = src1->wordmask->n_bits;
sbitmap_iterator sbi;
/* XXX: Optimize like the into version. */
dst->cache = NULL;
tempmask = sbitmap_alloc_with_popcount (src1size);
- sbitmap_zero (tempmask);
- sbitmap_copy (tempmask, src1->wordmask);
+ bitmap_clear (tempmask);
+ bitmap_copy (tempmask, src1->wordmask);
newarraysize = src1->numwords;
newarray = XNEWVEC (EBITMAP_ELT_TYPE, newarraysize);
EXECUTE_IF_SET_IN_EBITMAP (src1, 0, i, ebi)
{
- if (!ebitmap_bit_p (src2, i))
- gcc_assert (ebitmap_bit_p (dst, i));
+ if (!bitmap_bit_p (src2, i))
+ gcc_assert (bitmap_bit_p (dst, i));
}
for (i = 0; i < dst->numwords; i++)
gcc_assert (dst->elts[i] != 0);
/* Perform the operation DST = A | (B & ~C). */
bool
-ebitmap_ior_and_compl (ebitmap dst, ebitmap a, ebitmap b, ebitmap c)
+bitmap_ior_and_compl (ebitmap dst, ebitmap a, ebitmap b, ebitmap c)
{
bool changed;
ebitmap temp = ebitmap_alloc (1);
#ifdef EBITMAP_DEBUGGING
ebitmap dstcopy = ebitmap_alloc (1);
- ebitmap_copy (dstcopy, dst);
+ bitmap_copy (dstcopy, dst);
#endif
dst->cache = NULL;
- ebitmap_and_compl (temp, b, c);
- changed = ebitmap_ior (dst, a, temp);
+ bitmap_and_compl (temp, b, c);
+ changed = bitmap_ior (dst, a, temp);
#ifdef EBITMAP_DEBUGGING
{
ebitmap_iterator ebi;
unsigned int i;
EXECUTE_IF_SET_IN_EBITMAP (a, 0, i, ebi)
- gcc_assert (ebitmap_bit_p (dst, i));
+ gcc_assert (bitmap_bit_p (dst, i));
EXECUTE_IF_SET_IN_EBITMAP (b, 0, i, ebi)
- if (!ebitmap_bit_p (c, i))
- gcc_assert (ebitmap_bit_p (dst, i));
- gcc_assert (changed == !ebitmap_equal_p (dst, dstcopy));
+ if (!bitmap_bit_p (c, i))
+ gcc_assert (bitmap_bit_p (dst, i));
+ gcc_assert (changed == !bitmap_equal_p (dst, dstcopy));
}
#endif
ebitmap_free (temp);
/* Return true if ebitmap DST is equal to ebitmap SRC. */
bool
-ebitmap_equal_p (ebitmap dst, ebitmap src)
+bitmap_equal_p (ebitmap dst, ebitmap src)
{
unsigned int which = MIN (dst->wordmask->size, src->wordmask->size);
if (dst->numwords != src->numwords)
return false;
- /* sbitmap_equal compares up to the size of the first argument, so
+ /* bitmap_equal_p compares up to the size of the first argument, so
if the two sbitmaps are not equally sized, we need to pass the
smaller one as the first argument, or it will crash. */
if (which == dst->wordmask->size
- && !sbitmap_equal (dst->wordmask, src->wordmask))
+ && !bitmap_equal_p (dst->wordmask, src->wordmask))
return false;
else if (which == src->wordmask->size
- && !sbitmap_equal (src->wordmask, dst->wordmask))
+ && !bitmap_equal_p (src->wordmask, dst->wordmask))
return false;
return memcmp (dst->elts, src->elts,
} *ebitmap;
-#define ebitmap_empty_p(MAP) ((MAP)->numwords == 0)
+inline bool bitmap_empty_p (ebitmap map)
+{
+ return map->numwords == 0;
+}
+
#define ebitmap_free(MAP) (free((MAP)->elts), \
sbitmap_free ((MAP)->wordmask), \
free((MAP)))
-extern void ebitmap_set_bit (ebitmap, unsigned int);
-extern void ebitmap_clear_bit (ebitmap, unsigned int);
-extern bool ebitmap_bit_p (ebitmap, unsigned int);
-extern void dump_ebitmap (FILE *, ebitmap);
-extern void dump_ebitmap_file (FILE *, ebitmap);
-extern void dump_ebitmap_vector (FILE *, const char *, const char *, ebitmap *,
- int);
+extern void bitmap_set_bit (ebitmap, unsigned int);
+extern void bitmap_clear_bit (ebitmap, unsigned int);
+extern bool bitmap_bit_p (ebitmap, unsigned int);
+extern void dump_bitmap (FILE *, ebitmap);
+extern void dump_bitmap_file (FILE *, ebitmap);
+extern void dump_bitmap_vector (FILE *, const char *, const char *, ebitmap *,
+ int);
extern ebitmap ebitmap_alloc (unsigned int);
extern ebitmap *ebitmap_vector_alloc (unsigned int, unsigned int);
-extern void ebitmap_copy (ebitmap, ebitmap);
-extern void ebitmap_and (ebitmap, ebitmap, ebitmap);
-extern void ebitmap_and_into (ebitmap, ebitmap);
-extern bool ebitmap_and_compl (ebitmap, ebitmap, ebitmap);
-extern bool ebitmap_and_compl_into (ebitmap, ebitmap);
-extern bool ebitmap_ior_into (ebitmap, ebitmap);
-extern bool ebitmap_ior (ebitmap, ebitmap, ebitmap);
-extern bool ebitmap_ior_and_compl (ebitmap, ebitmap, ebitmap, ebitmap);
-extern bool ebitmap_ior_and_compl_into (ebitmap, ebitmap, ebitmap);
-extern bool ebitmap_equal_p (ebitmap, ebitmap);
-extern void ebitmap_clear (ebitmap);
-extern int ebitmap_last_set_bit (ebitmap);
-extern void debug_ebitmap (ebitmap);
-extern unsigned long ebitmap_popcount(ebitmap, unsigned long);
+extern void bitmap_copy (ebitmap, ebitmap);
+extern void bitmap_and (ebitmap, ebitmap, ebitmap);
+extern void bitmap_and_into (ebitmap, ebitmap);
+extern bool bitmap_and_compl (ebitmap, ebitmap, ebitmap);
+extern bool bitmap_and_compl_into (ebitmap, ebitmap);
+extern bool bitmap_ior_into (ebitmap, ebitmap);
+extern bool bitmap_ior (ebitmap, ebitmap, ebitmap);
+extern bool bitmap_ior_and_compl (ebitmap, ebitmap, ebitmap, ebitmap);
+extern bool bitmap_ior_and_compl_into (ebitmap, ebitmap, ebitmap);
+extern bool bitmap_equal_p (ebitmap, ebitmap);
+extern void bitmap_clear (ebitmap);
+extern int bitmap_last_set_bit (ebitmap);
+extern void debug_bitmap (ebitmap);
+extern unsigned long bitmap_popcount(ebitmap, unsigned long);
/* The iterator for ebitmap. */
typedef struct {
gcc_assert (ifun->eh->region_tree);
b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
- sbitmap_zero (b_outer);
+ bitmap_clear (b_outer);
do
{
/* Look for basic blocks within the prologue insns. */
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (blocks);
+ bitmap_clear (blocks);
SET_BIT (blocks, entry_edge->dest->index);
SET_BIT (blocks, orig_entry_edge->dest->index);
find_many_sub_basic_blocks (blocks);
/* Initialize any bitmaps that were passed in. */
if (transp)
{
- sbitmap_vector_ones (transp, last_basic_block);
+ bitmap_vector_ones (transp, last_basic_block);
}
if (comp)
- sbitmap_vector_zero (comp, last_basic_block);
+ bitmap_vector_clear (comp, last_basic_block);
if (antloc)
- sbitmap_vector_zero (antloc, last_basic_block);
+ bitmap_vector_clear (antloc, last_basic_block);
for (i = 0; i < table->size; i++)
{
basic_block bb;
prune_exprs = sbitmap_alloc (expr_hash_table.n_elems);
- sbitmap_zero (prune_exprs);
+ bitmap_clear (prune_exprs);
for (ui = 0; ui < expr_hash_table.size; ui++)
{
for (expr = expr_hash_table.table[ui]; expr; expr = expr->next_same_hash)
if ((e->flags & EDGE_ABNORMAL)
&& (pre_p || CALL_P (BB_END (e->src))))
{
- sbitmap_difference (antloc[bb->index],
+ bitmap_and_compl (antloc[bb->index],
antloc[bb->index], prune_exprs);
- sbitmap_difference (transp[bb->index],
+ bitmap_and_compl (transp[bb->index],
transp[bb->index], prune_exprs);
break;
}
the number of deletions achieved. We will prune these out of the
insertion/deletion sets. */
prune_exprs = sbitmap_alloc (n_elems);
- sbitmap_zero (prune_exprs);
+ bitmap_clear (prune_exprs);
/* Iterate over the edges counting the number of times each expression
needs to be inserted. */
compute_local_properties (transp, comp, antloc, &expr_hash_table);
prune_expressions (true);
- sbitmap_vector_zero (ae_kill, last_basic_block);
+ bitmap_vector_clear (ae_kill, last_basic_block);
/* Compute ae_kill for each basic block using:
FOR_EACH_BB (bb)
{
- sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
- sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
+ bitmap_ior (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
+ bitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
}
edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
set_size = pre_insert_map[0]->size;
num_edges = NUM_EDGES (edge_list);
inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
- sbitmap_vector_zero (inserted, num_edges);
+ bitmap_vector_clear (inserted, num_edges);
for (e = 0; e < num_edges; e++)
{
int changed, passes;
basic_block bb;
- sbitmap_vector_zero (hoist_vbeout, last_basic_block);
- sbitmap_vector_zero (hoist_vbein, last_basic_block);
+ bitmap_vector_clear (hoist_vbeout, last_basic_block);
+ bitmap_vector_clear (hoist_vbein, last_basic_block);
passes = 0;
changed = 1;
/* Include expressions in VBEout that are calculated
in BB and available at its end. */
- sbitmap_a_or_b (hoist_vbeout[bb->index],
+ bitmap_ior (hoist_vbeout[bb->index],
hoist_vbeout[bb->index], comp[bb->index]);
}
- changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index],
+ changed |= bitmap_or_and (hoist_vbein[bb->index],
antloc[bb->index],
hoist_vbeout[bb->index],
transp[bb->index]);
FOR_EACH_BB (bb)
{
fprintf (dump_file, "vbein (%d): ", bb->index);
- dump_sbitmap_file (dump_file, hoist_vbein[bb->index]);
+ dump_bitmap_file (dump_file, hoist_vbein[bb->index]);
fprintf (dump_file, "vbeout(%d): ", bb->index);
- dump_sbitmap_file (dump_file, hoist_vbeout[bb->index]);
+ dump_bitmap_file (dump_file, hoist_vbeout[bb->index]);
}
}
}
{
visited_allocated_locally = 1;
visited = sbitmap_alloc (last_basic_block);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
}
FOR_EACH_EDGE (pred, ei, bb->preds)
sbitmap visited = sbitmap_alloc (last_basic_block);
sese region = SCOP_REGION (scop);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
build_scop_bbs_1 (scop, visited, SESE_ENTRY_BB (region));
sbitmap_free (visited);
}
born = sbitmap_alloc (ira_max_point);
dead = sbitmap_alloc (ira_max_point);
- sbitmap_zero (born);
- sbitmap_zero (dead);
+ bitmap_clear (born);
+ bitmap_clear (dead);
FOR_EACH_OBJECT (obj, oi)
for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
{
}
born_or_dead = sbitmap_alloc (ira_max_point);
- sbitmap_a_or_b (born_or_dead, born, dead);
+ bitmap_ior (born_or_dead, born, dead);
map = (int *) ira_allocate (sizeof (int) * ira_max_point);
n = -1;
prev_born_p = prev_dead_p = false;
/* If the entire reg was live before blasting into subregs, we need
to init all of the subregs to ones else init to 0. */
if (init_value)
- sbitmap_ones (live_subregs[allocnum]);
+ bitmap_ones (live_subregs[allocnum]);
else
- sbitmap_zero (live_subregs[allocnum]);
+ bitmap_clear (live_subregs[allocnum]);
bitmap_set_bit (live_subregs_used, allocnum);
}
start++;
}
- if (sbitmap_empty_p (live_subregs[regno]))
+ if (bitmap_empty_p (live_subregs[regno]))
{
bitmap_clear_bit (live_subregs_used, regno);
bitmap_clear_bit (live_relevant_regs, regno);
/* We want a maximal solution, so make an optimistic initialization of
ANTIN. */
- sbitmap_vector_ones (antin, last_basic_block);
+ bitmap_vector_ones (antin, last_basic_block);
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of ANTIN above. */
/* Do not clear the aux field for blocks which are predecessors of
the EXIT block. That way we never add then to the worklist
again. */
- sbitmap_zero (antout[bb->index]);
+ bitmap_clear (antout[bb->index]);
else
{
/* Clear the aux field of this block so that it can be added to
sbitmap_intersection_of_succs (antout[bb->index], antin, bb);
}
- if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
+ if (bitmap_or_and (antin[bb->index], antloc[bb->index],
transp[bb->index], antout[bb->index]))
/* If the in state of this block changed, then we need
to add the predecessors of this block to the worklist
pred = INDEX_EDGE_PRED_BB (edge_list, x);
succ = INDEX_EDGE_SUCC_BB (edge_list, x);
if (pred == ENTRY_BLOCK_PTR)
- sbitmap_copy (earliest[x], antin[succ->index]);
+ bitmap_copy (earliest[x], antin[succ->index]);
else
{
if (succ == EXIT_BLOCK_PTR)
- sbitmap_zero (earliest[x]);
+ bitmap_clear (earliest[x]);
else
{
- sbitmap_difference (difference, antin[succ->index],
+ bitmap_and_compl (difference, antin[succ->index],
avout[pred->index]);
- sbitmap_not (temp_bitmap, antout[pred->index]);
- sbitmap_a_and_b_or_c (earliest[x], difference,
+ bitmap_not (temp_bitmap, antout[pred->index]);
+ bitmap_and_or (earliest[x], difference,
kill[pred->index], temp_bitmap);
}
}
example the expression is ANTLOC in a block within the loop) then
this algorithm will detect it when we process the block at the head
of the optimistic edge. That will requeue the affected blocks. */
- sbitmap_vector_ones (later, num_edges);
+ bitmap_vector_ones (later, num_edges);
/* Note that even though we want an optimistic setting of LATER, we
do not want to be overly optimistic. Consider an outgoing edge from
the entry block. That edge should always have a LATER value the
same as EARLIEST for that edge. */
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
- sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
+ bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
/* Add all the blocks to the worklist. This prevents an early exit from
the loop given our optimistic initialization of LATER above. */
qout = worklist;
/* Compute the intersection of LATERIN for each incoming edge to B. */
- sbitmap_ones (laterin[bb->index]);
+ bitmap_ones (laterin[bb->index]);
FOR_EACH_EDGE (e, ei, bb->preds)
- sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
+ bitmap_and (laterin[bb->index], laterin[bb->index],
later[(size_t)e->aux]);
/* Calculate LATER for all outgoing edges. */
FOR_EACH_EDGE (e, ei, bb->succs)
- if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
+ if (bitmap_ior_and_compl (later[(size_t) e->aux],
earliest[(size_t) e->aux],
laterin[e->src->index],
antloc[e->src->index])
/* Computation of insertion and deletion points requires computing LATERIN
for the EXIT block. We allocated an extra entry in the LATERIN array
for just this purpose. */
- sbitmap_ones (laterin[last_basic_block]);
+ bitmap_ones (laterin[last_basic_block]);
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
- sbitmap_a_and_b (laterin[last_basic_block],
+ bitmap_and (laterin[last_basic_block],
laterin[last_basic_block],
later[(size_t) e->aux]);
basic_block bb;
FOR_EACH_BB (bb)
- sbitmap_difference (del[bb->index], antloc[bb->index],
+ bitmap_and_compl (del[bb->index], antloc[bb->index],
laterin[bb->index]);
for (x = 0; x < NUM_EDGES (edge_list); x++)
basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
if (b == EXIT_BLOCK_PTR)
- sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
+ bitmap_and_compl (insert[x], later[x], laterin[last_basic_block]);
else
- sbitmap_difference (insert[x], later[x], laterin[b->index]);
+ bitmap_and_compl (insert[x], later[x], laterin[b->index]);
}
}
fprintf (dump_file, "Edge List:\n");
verify_edge_list (dump_file, edge_list);
print_edge_list (dump_file, edge_list);
- dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
- dump_sbitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
- dump_sbitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
- dump_sbitmap_vector (dump_file, "kill", "", kill, last_basic_block);
+ dump_bitmap_vector (dump_file, "transp", "", transp, last_basic_block);
+ dump_bitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
+ dump_bitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
+ dump_bitmap_vector (dump_file, "kill", "", kill, last_basic_block);
}
#endif
#ifdef LCM_DEBUG_INFO
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "antin", "", antin, last_basic_block);
- dump_sbitmap_vector (dump_file, "antout", "", antout, last_basic_block);
+ dump_bitmap_vector (dump_file, "antin", "", antin, last_basic_block);
+ dump_bitmap_vector (dump_file, "antout", "", antout, last_basic_block);
}
#endif
#ifdef LCM_DEBUG_INFO
if (dump_file)
- dump_sbitmap_vector (dump_file, "earliest", "", earliest, num_edges);
+ dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
#endif
sbitmap_vector_free (antout);
#ifdef LCM_DEBUG_INFO
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "laterin", "", laterin, last_basic_block + 1);
- dump_sbitmap_vector (dump_file, "later", "", later, num_edges);
+ dump_bitmap_vector (dump_file, "laterin", "", laterin, last_basic_block + 1);
+ dump_bitmap_vector (dump_file, "later", "", later, num_edges);
}
#endif
*insert = sbitmap_vector_alloc (num_edges, n_exprs);
*del = sbitmap_vector_alloc (last_basic_block, n_exprs);
- sbitmap_vector_zero (*insert, num_edges);
- sbitmap_vector_zero (*del, last_basic_block);
+ bitmap_vector_clear (*insert, num_edges);
+ bitmap_vector_clear (*del, last_basic_block);
compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);
sbitmap_vector_free (laterin);
#ifdef LCM_DEBUG_INFO
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
- dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
+ dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
+ dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
last_basic_block);
}
#endif
XNEWVEC (basic_block, n_basic_blocks - NUM_FIXED_BLOCKS);
/* We want a maximal solution. */
- sbitmap_vector_ones (avout, last_basic_block);
+ bitmap_vector_ones (avout, last_basic_block);
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of AVOUT above. */
if (bb->aux == ENTRY_BLOCK_PTR)
/* Do not clear the aux field for blocks which are successors of the
ENTRY block. That way we never add then to the worklist again. */
- sbitmap_zero (avin[bb->index]);
+ bitmap_clear (avin[bb->index]);
else
{
/* Clear the aux field of this block so that it can be added to
sbitmap_intersection_of_preds (avin[bb->index], avout, bb);
}
- if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
+ if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
avin[bb->index], kill[bb->index]))
/* If the out state of this block changed, then we need
to add the successors of this block to the worklist
pred = INDEX_EDGE_PRED_BB (edge_list, x);
succ = INDEX_EDGE_SUCC_BB (edge_list, x);
if (succ == EXIT_BLOCK_PTR)
- sbitmap_copy (farthest[x], st_avout[pred->index]);
+ bitmap_copy (farthest[x], st_avout[pred->index]);
else
{
if (pred == ENTRY_BLOCK_PTR)
- sbitmap_zero (farthest[x]);
+ bitmap_clear (farthest[x]);
else
{
- sbitmap_difference (difference, st_avout[pred->index],
+ bitmap_and_compl (difference, st_avout[pred->index],
st_antin[succ->index]);
- sbitmap_not (temp_bitmap, st_avin[succ->index]);
- sbitmap_a_and_b_or_c (farthest[x], difference,
+ bitmap_not (temp_bitmap, st_avin[succ->index]);
+ bitmap_and_or (farthest[x], difference,
kill[succ->index], temp_bitmap);
}
}
INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
/* We want a maximal solution. */
- sbitmap_vector_ones (nearer, num_edges);
+ bitmap_vector_ones (nearer, num_edges);
/* Note that even though we want an optimistic setting of NEARER, we
do not want to be overly optimistic. Consider an incoming edge to
the exit block. That edge should always have a NEARER value the
same as FARTHEST for that edge. */
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
- sbitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
+ bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
/* Add all the blocks to the worklist. This prevents an early exit
from the loop given our optimistic initialization of NEARER. */
bb->aux = NULL;
/* Compute the intersection of NEARER for each outgoing edge from B. */
- sbitmap_ones (nearerout[bb->index]);
+ bitmap_ones (nearerout[bb->index]);
FOR_EACH_EDGE (e, ei, bb->succs)
- sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
+ bitmap_and (nearerout[bb->index], nearerout[bb->index],
nearer[(size_t) e->aux]);
/* Calculate NEARER for all incoming edges. */
FOR_EACH_EDGE (e, ei, bb->preds)
- if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
+ if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
farthest[(size_t) e->aux],
nearerout[e->dest->index],
st_avloc[e->dest->index])
/* Computation of insertion and deletion points requires computing NEAREROUT
for the ENTRY block. We allocated an extra entry in the NEAREROUT array
for just this purpose. */
- sbitmap_ones (nearerout[last_basic_block]);
+ bitmap_ones (nearerout[last_basic_block]);
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
- sbitmap_a_and_b (nearerout[last_basic_block],
+ bitmap_and (nearerout[last_basic_block],
nearerout[last_basic_block],
nearer[(size_t) e->aux]);
basic_block bb;
FOR_EACH_BB (bb)
- sbitmap_difference (del[bb->index], st_avloc[bb->index],
+ bitmap_and_compl (del[bb->index], st_avloc[bb->index],
nearerout[bb->index]);
for (x = 0; x < NUM_EDGES (edge_list); x++)
{
basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
if (b == ENTRY_BLOCK_PTR)
- sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
+ bitmap_and_compl (insert[x], nearer[x], nearerout[last_basic_block]);
else
- sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
+ bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
}
}
st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
- sbitmap_vector_zero (st_antin, last_basic_block);
- sbitmap_vector_zero (st_antout, last_basic_block);
+ bitmap_vector_clear (st_antin, last_basic_block);
+ bitmap_vector_clear (st_antout, last_basic_block);
compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
/* Compute global anticipatability. */
fprintf (dump_file, "Edge List:\n");
verify_edge_list (dump_file, edge_list);
print_edge_list (dump_file, edge_list);
- dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_antin", "", st_antin, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_antout", "", st_antout, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
+ dump_bitmap_vector (dump_file, "transp", "", transp, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_antin", "", st_antin, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_antout", "", st_antout, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
}
#endif
#ifdef LCM_DEBUG_INFO
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block);
}
#endif
#ifdef LCM_DEBUG_INFO
if (dump_file)
- dump_sbitmap_vector (dump_file, "farthest", "", farthest, num_edges);
+ dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
#endif
sbitmap_vector_free (st_antin);
#ifdef LCM_DEBUG_INFO
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "nearerout", "", nearerout,
+ dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
last_basic_block + 1);
- dump_sbitmap_vector (dump_file, "nearer", "", nearer, num_edges);
+ dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
}
#endif
#ifdef LCM_DEBUG_INFO
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
- dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
+ dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
+ dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
last_basic_block);
}
#endif
bool ok;
wont_exit = sbitmap_alloc (npeel + 1);
- sbitmap_ones (wont_exit);
+ bitmap_ones (wont_exit);
RESET_BIT (wont_exit, 0);
if (desc->noloop_assumptions)
RESET_BIT (wont_exit, 1);
exit_mod = niter % (max_unroll + 1);
wont_exit = sbitmap_alloc (max_unroll + 1);
- sbitmap_ones (wont_exit);
+ bitmap_ones (wont_exit);
remove_edges = NULL;
if (flag_split_ivs_in_unroller
here; the only exception is when we have extra zero check and the number
of iterations is reliable. Also record the place of (possible) extra
zero check. */
- sbitmap_zero (wont_exit);
+ bitmap_clear (wont_exit);
if (extra_zero_check
&& !desc->noloop_assumptions)
SET_BIT (wont_exit, 1);
for (i = 0; i < n_peel; i++)
{
/* Peel the copy. */
- sbitmap_zero (wont_exit);
+ bitmap_clear (wont_exit);
if (i != n_peel - 1 || !last_may_exit)
SET_BIT (wont_exit, 1);
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
/* And unroll loop. */
- sbitmap_ones (wont_exit);
+ bitmap_ones (wont_exit);
RESET_BIT (wont_exit, may_exit_copy);
opt_info_start_duplication (opt_info);
opt_info = analyze_insns_in_loop (loop);
wont_exit = sbitmap_alloc (npeel + 1);
- sbitmap_zero (wont_exit);
+ bitmap_clear (wont_exit);
opt_info_start_duplication (opt_info);
wont_exit = sbitmap_alloc (nunroll + 1);
- sbitmap_zero (wont_exit);
+ bitmap_clear (wont_exit);
opt_info_start_duplication (opt_info);
ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
propagate_pseudo_copies ();
sub_blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (sub_blocks);
+ bitmap_clear (sub_blocks);
EXECUTE_IF_SET_IN_BITMAP (decomposable_context, 0, regno, iter)
decompose_register (regno);
born = sbitmap_alloc (lra_live_max_point);
dead = sbitmap_alloc (lra_live_max_point);
- sbitmap_zero (born);
- sbitmap_zero (dead);
+ bitmap_clear (born);
+ bitmap_clear (dead);
max_regno = max_reg_num ();
for (i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
{
}
}
born_or_dead = sbitmap_alloc (lra_live_max_point);
- sbitmap_a_or_b (born_or_dead, born, dead);
+ bitmap_ior (born_or_dead, born, dead);
map = XCNEWVEC (int, lra_live_max_point);
n = -1;
prev_born_p = prev_dead_p = false;
df_set_flags (DF_NO_INSN_RESCAN);
lra_constraint_insn_stack = VEC_alloc (rtx, heap, get_max_uid ());
lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
- sbitmap_zero (lra_constraint_insn_stack_bitmap);
+ bitmap_clear (lra_constraint_insn_stack_bitmap);
lra_live_ranges_init ();
lra_constraints_init ();
lra_curr_reload_num = 0;
{
sbitmap blocks;
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_ones (blocks);
+ bitmap_ones (blocks);
find_many_sub_basic_blocks (blocks);
sbitmap_free (blocks);
}
transp = sbitmap_vector_alloc (last_basic_block, n_entities);
comp = sbitmap_vector_alloc (last_basic_block, n_entities);
- sbitmap_vector_ones (transp, last_basic_block);
+ bitmap_vector_ones (transp, last_basic_block);
for (j = n_entities - 1; j >= 0; j--)
{
sbitmap *insert;
/* Set the anticipatable and computing arrays. */
- sbitmap_vector_zero (antic, last_basic_block);
- sbitmap_vector_zero (comp, last_basic_block);
+ bitmap_vector_clear (antic, last_basic_block);
+ bitmap_vector_clear (comp, last_basic_block);
for (j = n_entities - 1; j >= 0; j--)
{
int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
placement mode switches to modes with priority I. */
FOR_EACH_BB (bb)
- sbitmap_not (kill[bb->index], transp[bb->index]);
+ bitmap_not (kill[bb->index], transp[bb->index]);
edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
kill, &insert, &del);
fprintf (dump_file, "%11d %11d %5s %s\n", start, end, "", "(max, min)");
}
- sbitmap_zero (must_follow);
+ bitmap_clear (must_follow);
SET_BIT (must_follow, move->def);
start = MAX (start, end - (ii - 1));
move->new_reg = gen_reg_rtx (GET_MODE (prev_reg));
move->num_consecutive_stages = distances[0] && distances[1] ? 2 : 1;
move->insn = gen_move_insn (move->new_reg, copy_rtx (prev_reg));
- sbitmap_zero (move->uses);
+ bitmap_clear (move->uses);
prev_reg = move->new_reg;
}
goto clear;
}
- sbitmap_ones (sched_nodes);
+ bitmap_ones (sched_nodes);
/* Calculate the new placement of the branch. It should be in row
ii-1 and fall into it's scheduling window. */
int count_succs;
/* 1. compute sched window for u (start, end, step). */
- sbitmap_zero (psp);
- sbitmap_zero (pss);
- psp_not_empty = sbitmap_a_and_b_cg (psp, u_node_preds, sched_nodes);
- pss_not_empty = sbitmap_a_and_b_cg (pss, u_node_succs, sched_nodes);
+ bitmap_clear (psp);
+ bitmap_clear (pss);
+ psp_not_empty = bitmap_and (psp, u_node_preds, sched_nodes);
+ pss_not_empty = bitmap_and (pss, u_node_succs, sched_nodes);
/* We first compute a forward range (start <= end), then decide whether
to reverse it. */
first_cycle_in_window = (step == 1) ? start : end - step;
last_cycle_in_window = (step == 1) ? end - step : start;
- sbitmap_zero (must_precede);
- sbitmap_zero (must_follow);
+ bitmap_clear (must_precede);
+ bitmap_clear (must_follow);
if (dump_file)
fprintf (dump_file, "\nmust_precede: ");
partial_schedule_ptr ps = create_partial_schedule (ii, g, DFA_HISTORY);
- sbitmap_ones (tobe_scheduled);
- sbitmap_zero (sched_nodes);
+ bitmap_ones (tobe_scheduled);
+ bitmap_clear (sched_nodes);
while (flush_and_start_over && (ii < maxii))
{
if (dump_file)
fprintf (dump_file, "Starting with ii=%d\n", ii);
flush_and_start_over = false;
- sbitmap_zero (sched_nodes);
+ bitmap_clear (sched_nodes);
for (i = 0; i < num_nodes; i++)
{
ps = NULL;
}
else
- gcc_assert (sbitmap_equal (tobe_scheduled, sched_nodes));
+ gcc_assert (bitmap_equal_p (tobe_scheduled, sched_nodes));
sbitmap_free (sched_nodes);
sbitmap_free (must_precede);
int i;
sbitmap tmp = sbitmap_alloc (num_nodes);
- sbitmap_zero (tmp);
+ bitmap_clear (tmp);
if (dump_file)
fprintf (dump_file, "SMS final nodes order: \n");
sbitmap tmp = sbitmap_alloc (num_nodes);
sbitmap ones = sbitmap_alloc (num_nodes);
- sbitmap_zero (prev_sccs);
- sbitmap_ones (ones);
+ bitmap_clear (prev_sccs);
+ bitmap_ones (ones);
/* Perform the node ordering starting from the SCC with the highest recMII.
For each SCC order the nodes according to their ASAP/ALAP/HEIGHT etc. */
/* Add nodes on paths from previous SCCs to the current SCC. */
find_nodes_on_paths (on_path, g, prev_sccs, scc->nodes);
- sbitmap_a_or_b (tmp, scc->nodes, on_path);
+ bitmap_ior (tmp, scc->nodes, on_path);
/* Add nodes on paths from the current SCC to previous SCCs. */
find_nodes_on_paths (on_path, g, scc->nodes, prev_sccs);
- sbitmap_a_or_b (tmp, tmp, on_path);
+ bitmap_ior (tmp, tmp, on_path);
/* Remove nodes of previous SCCs from current extended SCC. */
- sbitmap_difference (tmp, tmp, prev_sccs);
+ bitmap_and_compl (tmp, tmp, prev_sccs);
pos = order_nodes_in_scc (g, prev_sccs, tmp, node_order, pos);
/* Above call to order_nodes_in_scc updated prev_sccs |= tmp. */
to order_nodes_in_scc handles a single connected component. */
while (pos < g->num_nodes)
{
- sbitmap_difference (tmp, ones, prev_sccs);
+ bitmap_and_compl (tmp, ones, prev_sccs);
pos = order_nodes_in_scc (g, prev_sccs, tmp, node_order, pos);
}
sbitmap_free (prev_sccs);
sbitmap predecessors = sbitmap_alloc (num_nodes);
sbitmap successors = sbitmap_alloc (num_nodes);
- sbitmap_zero (predecessors);
+ bitmap_clear (predecessors);
find_predecessors (predecessors, g, nodes_ordered);
- sbitmap_zero (successors);
+ bitmap_clear (successors);
find_successors (successors, g, nodes_ordered);
- sbitmap_zero (tmp);
- if (sbitmap_a_and_b_cg (tmp, predecessors, scc))
+ bitmap_clear (tmp);
+ if (bitmap_and (tmp, predecessors, scc))
{
- sbitmap_copy (workset, tmp);
+ bitmap_copy (workset, tmp);
dir = BOTTOMUP;
}
- else if (sbitmap_a_and_b_cg (tmp, successors, scc))
+ else if (bitmap_and (tmp, successors, scc))
{
- sbitmap_copy (workset, tmp);
+ bitmap_copy (workset, tmp);
dir = TOPDOWN;
}
else
{
int u;
- sbitmap_zero (workset);
+ bitmap_clear (workset);
if ((u = find_max_asap (g, scc)) >= 0)
SET_BIT (workset, u);
dir = BOTTOMUP;
}
- sbitmap_zero (zero_bitmap);
- while (!sbitmap_equal (workset, zero_bitmap))
+ bitmap_clear (zero_bitmap);
+ while (!bitmap_equal_p (workset, zero_bitmap))
{
int v;
ddg_node_ptr v_node;
if (dir == TOPDOWN)
{
- while (!sbitmap_equal (workset, zero_bitmap))
+ while (!bitmap_equal_p (workset, zero_bitmap))
{
v = find_max_hv_min_mob (g, workset);
v_node = &g->nodes[v];
node_order[pos++] = v;
v_node_succs = NODE_SUCCESSORS (v_node);
- sbitmap_a_and_b (tmp, v_node_succs, scc);
+ bitmap_and (tmp, v_node_succs, scc);
/* Don't consider the already ordered successors again. */
- sbitmap_difference (tmp, tmp, nodes_ordered);
- sbitmap_a_or_b (workset, workset, tmp);
+ bitmap_and_compl (tmp, tmp, nodes_ordered);
+ bitmap_ior (workset, workset, tmp);
RESET_BIT (workset, v);
SET_BIT (nodes_ordered, v);
}
dir = BOTTOMUP;
- sbitmap_zero (predecessors);
+ bitmap_clear (predecessors);
find_predecessors (predecessors, g, nodes_ordered);
- sbitmap_a_and_b (workset, predecessors, scc);
+ bitmap_and (workset, predecessors, scc);
}
else
{
- while (!sbitmap_equal (workset, zero_bitmap))
+ while (!bitmap_equal_p (workset, zero_bitmap))
{
v = find_max_dv_min_mob (g, workset);
v_node = &g->nodes[v];
node_order[pos++] = v;
v_node_preds = NODE_PREDECESSORS (v_node);
- sbitmap_a_and_b (tmp, v_node_preds, scc);
+ bitmap_and (tmp, v_node_preds, scc);
/* Don't consider the already ordered predecessors again. */
- sbitmap_difference (tmp, tmp, nodes_ordered);
- sbitmap_a_or_b (workset, workset, tmp);
+ bitmap_and_compl (tmp, tmp, nodes_ordered);
+ bitmap_ior (workset, workset, tmp);
RESET_BIT (workset, v);
SET_BIT (nodes_ordered, v);
}
dir = TOPDOWN;
- sbitmap_zero (successors);
+ bitmap_clear (successors);
find_successors (successors, g, nodes_ordered);
- sbitmap_a_and_b (workset, successors, scc);
+ bitmap_and (workset, successors, scc);
}
}
sbitmap_free (tmp);
basic_block bb;
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (blocks);
+ bitmap_clear (blocks);
changed = false;
FOR_EACH_BB_REVERSE (bb)
all_vd = XNEWVEC (struct value_data, last_basic_block);
visited = sbitmap_alloc (last_basic_block);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
if (MAY_HAVE_DEBUG_INSNS)
debug_insn_changes_pool
{
sbitmap blocks;
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_ones (blocks);
+ bitmap_ones (blocks);
find_many_sub_basic_blocks (blocks);
sbitmap_free (blocks);
}
/* Copy sbitmap SRC to DST. */
void
-sbitmap_copy (sbitmap dst, const_sbitmap src)
+bitmap_copy (sbitmap dst, const_sbitmap src)
{
memcpy (dst->elms, src->elms, sizeof (SBITMAP_ELT_TYPE) * dst->size);
if (dst->popcount)
/* Copy the first N elements of sbitmap SRC to DST. */
void
-sbitmap_copy_n (sbitmap dst, const_sbitmap src, unsigned int n)
+bitmap_copy_n (sbitmap dst, const_sbitmap src, unsigned int n)
{
memcpy (dst->elms, src->elms, sizeof (SBITMAP_ELT_TYPE) * n);
if (dst->popcount)
/* Determine if a == b. */
int
-sbitmap_equal (const_sbitmap a, const_sbitmap b)
+bitmap_equal_p (const_sbitmap a, const_sbitmap b)
{
return !memcmp (a->elms, b->elms, sizeof (SBITMAP_ELT_TYPE) * a->size);
}
/* Return true if the bitmap is empty. */
bool
-sbitmap_empty_p (const_sbitmap bmap)
+bitmap_empty_p (const_sbitmap bmap)
{
unsigned int i;
for (i=0; i<bmap->size; i++)
START. */
bool
-sbitmap_range_empty_p (const_sbitmap bmap, unsigned int start, unsigned int n)
+bitmap_range_empty_p (const_sbitmap bmap, unsigned int start, unsigned int n)
{
unsigned int i = start / SBITMAP_ELT_BITS;
SBITMAP_ELT_TYPE elm;
/* Zero all elements in a bitmap. */
void
-sbitmap_zero (sbitmap bmap)
+bitmap_clear (sbitmap bmap)
{
memset (bmap->elms, 0, SBITMAP_SIZE_BYTES (bmap));
if (bmap->popcount)
/* Set all elements in a bitmap to ones. */
void
-sbitmap_ones (sbitmap bmap)
+bitmap_ones (sbitmap bmap)
{
unsigned int last_bit;
/* Zero a vector of N_VECS bitmaps. */
void
-sbitmap_vector_zero (sbitmap *bmap, unsigned int n_vecs)
+bitmap_vector_clear (sbitmap *bmap, unsigned int n_vecs)
{
unsigned int i;
for (i = 0; i < n_vecs; i++)
- sbitmap_zero (bmap[i]);
+ bitmap_clear (bmap[i]);
}
/* Set a vector of N_VECS bitmaps to ones. */
void
-sbitmap_vector_ones (sbitmap *bmap, unsigned int n_vecs)
+bitmap_vector_ones (sbitmap *bmap, unsigned int n_vecs)
{
unsigned int i;
for (i = 0; i < n_vecs; i++)
- sbitmap_ones (bmap[i]);
+ bitmap_ones (bmap[i]);
}
/* Set DST to be A union (B - C).
Returns true if any change is made. */
bool
-sbitmap_union_of_diff_cg (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
+bitmap_ior_and_compl (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
return changed != 0;
}
-void
-sbitmap_union_of_diff (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
-{
- unsigned int i, n = dst->size;
- sbitmap_ptr dstp = dst->elms;
- const_sbitmap_ptr ap = a->elms;
- const_sbitmap_ptr bp = b->elms;
- const_sbitmap_ptr cp = c->elms;
-
- gcc_assert (!dst->popcount && !a->popcount
- && !b->popcount && !c->popcount);
-
- for (i = 0; i < n; i++)
- *dstp++ = *ap++ | (*bp++ & ~*cp++);
-}
-
/* Set bitmap DST to the bitwise negation of the bitmap SRC. */
void
-sbitmap_not (sbitmap dst, const_sbitmap src)
+bitmap_not (sbitmap dst, const_sbitmap src)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
for (i = 0; i < n; i++)
*dstp++ = ~*srcp++;
- /* Zero all bits past n_bits, by ANDing dst with sbitmap_ones. */
+ /* Zero all bits past n_bits, by ANDing dst with bitmap_ones. */
last_bit = src->n_bits % SBITMAP_ELT_BITS;
if (last_bit)
dst->elms[n-1] = dst->elms[n-1]
in A and the bits in B. i.e. dst = a & (~b). */
void
-sbitmap_difference (sbitmap dst, const_sbitmap a, const_sbitmap b)
+bitmap_and_compl (sbitmap dst, const_sbitmap a, const_sbitmap b)
{
unsigned int i, dst_size = dst->size;
unsigned int min_size = dst->size;
Return false otherwise. */
bool
-sbitmap_any_common_bits (const_sbitmap a, const_sbitmap b)
+bitmap_intersect_p (const_sbitmap a, const_sbitmap b)
{
const_sbitmap_ptr ap = a->elms;
const_sbitmap_ptr bp = b->elms;
Return nonzero if any change is made. */
bool
-sbitmap_a_and_b_cg (sbitmap dst, const_sbitmap a, const_sbitmap b)
-{
- unsigned int i, n = dst->size;
- sbitmap_ptr dstp = dst->elms;
- const_sbitmap_ptr ap = a->elms;
- const_sbitmap_ptr bp = b->elms;
- SBITMAP_ELT_TYPE changed = 0;
-
- gcc_assert (!dst->popcount);
-
- for (i = 0; i < n; i++)
- {
- const SBITMAP_ELT_TYPE tmp = *ap++ & *bp++;
- changed |= *dstp ^ tmp;
- *dstp++ = tmp;
- }
-
- return changed != 0;
-}
-
-void
-sbitmap_a_and_b (sbitmap dst, const_sbitmap a, const_sbitmap b)
+bitmap_and (sbitmap dst, const_sbitmap a, const_sbitmap b)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
const_sbitmap_ptr bp = b->elms;
bool has_popcount = dst->popcount != NULL;
unsigned char *popcountp = dst->popcount;
+ bool anychange = false;
for (i = 0; i < n; i++)
{
{
bool wordchanged = (*dstp ^ tmp) != 0;
if (wordchanged)
- *popcountp = do_popcount (tmp);
+ {
+ *popcountp = do_popcount (tmp);
+ anychange = true;
+ }
popcountp++;
}
*dstp++ = tmp;
if (has_popcount)
sbitmap_verify_popcount (dst);
#endif
+ return anychange;
}
/* Set DST to be (A xor B)).
Return nonzero if any change is made. */
bool
-sbitmap_a_xor_b_cg (sbitmap dst, const_sbitmap a, const_sbitmap b)
-{
- unsigned int i, n = dst->size;
- sbitmap_ptr dstp = dst->elms;
- const_sbitmap_ptr ap = a->elms;
- const_sbitmap_ptr bp = b->elms;
- SBITMAP_ELT_TYPE changed = 0;
-
- gcc_assert (!dst->popcount);
-
- for (i = 0; i < n; i++)
- {
- const SBITMAP_ELT_TYPE tmp = *ap++ ^ *bp++;
- changed |= *dstp ^ tmp;
- *dstp++ = tmp;
- }
-
- return changed != 0;
-}
-
-void
-sbitmap_a_xor_b (sbitmap dst, const_sbitmap a, const_sbitmap b)
+bitmap_xor (sbitmap dst, const_sbitmap a, const_sbitmap b)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
const_sbitmap_ptr bp = b->elms;
bool has_popcount = dst->popcount != NULL;
unsigned char *popcountp = dst->popcount;
+ bool anychange = false;
for (i = 0; i < n; i++)
{
{
bool wordchanged = (*dstp ^ tmp) != 0;
if (wordchanged)
- *popcountp = do_popcount (tmp);
+ {
+ *popcountp = do_popcount (tmp);
+ anychange = true;
+ }
popcountp++;
}
*dstp++ = tmp;
if (has_popcount)
sbitmap_verify_popcount (dst);
#endif
+ return anychange;
}
/* Set DST to be (A or B)).
Return nonzero if any change is made. */
bool
-sbitmap_a_or_b_cg (sbitmap dst, const_sbitmap a, const_sbitmap b)
-{
- unsigned int i, n = dst->size;
- sbitmap_ptr dstp = dst->elms;
- const_sbitmap_ptr ap = a->elms;
- const_sbitmap_ptr bp = b->elms;
- SBITMAP_ELT_TYPE changed = 0;
-
- gcc_assert (!dst->popcount);
-
- for (i = 0; i < n; i++)
- {
- const SBITMAP_ELT_TYPE tmp = *ap++ | *bp++;
- changed |= *dstp ^ tmp;
- *dstp++ = tmp;
- }
-
- return changed != 0;
-}
-
-void
-sbitmap_a_or_b (sbitmap dst, const_sbitmap a, const_sbitmap b)
+bitmap_ior (sbitmap dst, const_sbitmap a, const_sbitmap b)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
const_sbitmap_ptr bp = b->elms;
bool has_popcount = dst->popcount != NULL;
unsigned char *popcountp = dst->popcount;
+ bool anychange = false;
for (i = 0; i < n; i++)
{
{
bool wordchanged = (*dstp ^ tmp) != 0;
if (wordchanged)
- *popcountp = do_popcount (tmp);
+ {
+ *popcountp = do_popcount (tmp);
+ anychange = true;
+ }
popcountp++;
}
*dstp++ = tmp;
if (has_popcount)
sbitmap_verify_popcount (dst);
#endif
+ return anychange;
}
/* Return nonzero if A is a subset of B. */
bool
-sbitmap_a_subset_b_p (const_sbitmap a, const_sbitmap b)
+bitmap_subset_p (const_sbitmap a, const_sbitmap b)
{
unsigned int i, n = a->size;
const_sbitmap_ptr ap, bp;
Return nonzero if any change is made. */
bool
-sbitmap_a_or_b_and_c_cg (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
+bitmap_or_and (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
return changed != 0;
}
-void
-sbitmap_a_or_b_and_c (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
-{
- unsigned int i, n = dst->size;
- sbitmap_ptr dstp = dst->elms;
- const_sbitmap_ptr ap = a->elms;
- const_sbitmap_ptr bp = b->elms;
- const_sbitmap_ptr cp = c->elms;
-
- gcc_assert (!dst->popcount);
-
- for (i = 0; i < n; i++)
- *dstp++ = *ap++ | (*bp++ & *cp++);
-}
-
/* Set DST to be (A and (B or C)).
Return nonzero if any change is made. */
bool
-sbitmap_a_and_b_or_c_cg (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
+bitmap_and_or (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
{
unsigned int i, n = dst->size;
sbitmap_ptr dstp = dst->elms;
return changed != 0;
}
-void
-sbitmap_a_and_b_or_c (sbitmap dst, const_sbitmap a, const_sbitmap b, const_sbitmap c)
-{
- unsigned int i, n = dst->size;
- sbitmap_ptr dstp = dst->elms;
- const_sbitmap_ptr ap = a->elms;
- const_sbitmap_ptr bp = b->elms;
- const_sbitmap_ptr cp = c->elms;
-
- for (i = 0; i < n; i++)
- *dstp++ = *ap++ & (*bp++ | *cp++);
-}
-
/* Return number of first bit set in the bitmap, -1 if none. */
int
-sbitmap_first_set_bit (const_sbitmap bmap)
+bitmap_first_set_bit (const_sbitmap bmap)
{
unsigned int n = 0;
sbitmap_iterator sbi;
/* Return number of last bit set in the bitmap, -1 if none. */
int
-sbitmap_last_set_bit (const_sbitmap bmap)
+bitmap_last_set_bit (const_sbitmap bmap)
{
int i;
const SBITMAP_ELT_TYPE *const ptr = bmap->elms;
}
void
-dump_sbitmap (FILE *file, const_sbitmap bmap)
+dump_bitmap (FILE *file, const_sbitmap bmap)
{
unsigned int i, n, j;
unsigned int set_size = bmap->size;
}
void
-dump_sbitmap_file (FILE *file, const_sbitmap bmap)
+dump_bitmap_file (FILE *file, const_sbitmap bmap)
{
unsigned int i, pos;
}
DEBUG_FUNCTION void
-debug_sbitmap (const_sbitmap bmap)
+debug_bitmap (const_sbitmap bmap)
{
- dump_sbitmap_file (stderr, bmap);
+ dump_bitmap_file (stderr, bmap);
}
void
-dump_sbitmap_vector (FILE *file, const char *title, const char *subtitle,
+dump_bitmap_vector (FILE *file, const char *title, const char *subtitle,
sbitmap *bmaps, int n_maps)
{
int i;
for (i = 0; i < n_maps; i++)
{
fprintf (file, "%s %d\n", subtitle, i);
- dump_sbitmap (file, bmaps[i]);
+ dump_bitmap (file, bmaps[i]);
}
fprintf (file, "\n");
Most other operations on this set representation are O(U) where U is
the size of the set universe:
- * clear : sbitmap_zero
+ * clear : bitmap_clear
* cardinality : sbitmap_popcount
- * choose_one : sbitmap_first_set_bit /
- sbitmap_last_set_bit
+ * choose_one : bitmap_first_set_bit /
+ bitmap_last_set_bit
* forall : EXECUTE_IF_SET_IN_SBITMAP
- * set_copy : sbitmap_copy / sbitmap_copy_n
- * set_intersection : sbitmap_a_and_b
- * set_union : sbitmap_a_or_b
- * set_difference : sbitmap_difference
+ * set_copy : bitmap_copy / bitmap_copy_n
+ * set_intersection : bitmap_and
+ * set_union : bitmap_ior
+ * set_difference : bitmap_and_compl
 * set_disjunction : (not implemented)
- * set_compare : sbitmap_equal
+ * set_compare : bitmap_equal_p
 Some operations on 3 sets that occur frequently in data flow problems
are also implemented:
- * A | (B & C) : sbitmap_a_or_b_and_c
- * A | (B & ~C) : sbitmap_union_of_diff
- * A & (B | C) : sbitmap_a_and_b_or_c
+ * A | (B & C) : bitmap_or_and
+ * A | (B & ~C) : bitmap_ior_and_compl
+ * A & (B | C) : bitmap_and_or
Most of the set functions have two variants: One that returns non-zero
if members were added or removed from the target set, and one that just
} \
} while (0)
-#define sbitmap_free(MAP) (free((MAP)->popcount), free((MAP)))
-#define sbitmap_vector_free(VEC) free(VEC)
+/* Free the bitmap MAP, including its popcount side-table if present.
+   Must be 'static inline': a plain 'inline' definition in a header has
+   no external definition under C99 semantics, causing link failures.  */
+static inline void sbitmap_free (sbitmap map)
+{
+  free (map->popcount);
+  free (map);
+}
-extern void dump_sbitmap (FILE *, const_sbitmap);
-extern void dump_sbitmap_file (FILE *, const_sbitmap);
-extern void dump_sbitmap_vector (FILE *, const char *, const char *, sbitmap *,
+/* Free the bitmap vector VEC.  Note this frees only the vector itself;
+   the member bitmaps share VEC's single allocation (see
+   sbitmap_vector_alloc), so no per-element free is needed.
+   'static inline' for the same header-linkage reason as sbitmap_free.  */
+static inline void sbitmap_vector_free (sbitmap *vec)
+{
+  free (vec);
+}
+
+extern void dump_bitmap (FILE *, const_sbitmap);
+extern void dump_bitmap_file (FILE *, const_sbitmap);
+extern void dump_bitmap_vector (FILE *, const char *, const char *, sbitmap *,
int);
extern sbitmap sbitmap_alloc (unsigned int);
extern sbitmap sbitmap_alloc_with_popcount (unsigned int);
extern sbitmap *sbitmap_vector_alloc (unsigned int, unsigned int);
extern sbitmap sbitmap_resize (sbitmap, unsigned int, int);
-extern void sbitmap_copy (sbitmap, const_sbitmap);
-extern void sbitmap_copy_n (sbitmap, const_sbitmap, unsigned int);
-extern int sbitmap_equal (const_sbitmap, const_sbitmap);
-extern bool sbitmap_empty_p (const_sbitmap);
-extern bool sbitmap_range_empty_p (const_sbitmap, unsigned int, unsigned int);
-extern void sbitmap_zero (sbitmap);
-extern void sbitmap_ones (sbitmap);
-extern void sbitmap_vector_zero (sbitmap *, unsigned int);
-extern void sbitmap_vector_ones (sbitmap *, unsigned int);
-
-extern void sbitmap_union_of_diff (sbitmap, const_sbitmap,
- const_sbitmap, const_sbitmap);
-extern bool sbitmap_union_of_diff_cg (sbitmap, const_sbitmap,
+extern void bitmap_copy (sbitmap, const_sbitmap);
+extern void bitmap_copy_n (sbitmap, const_sbitmap, unsigned int);
+extern int bitmap_equal_p (const_sbitmap, const_sbitmap);
+extern bool bitmap_empty_p (const_sbitmap);
+extern bool bitmap_range_empty_p (const_sbitmap, unsigned int, unsigned int);
+extern void bitmap_clear (sbitmap);
+extern void bitmap_ones (sbitmap);
+extern void bitmap_vector_clear (sbitmap *, unsigned int);
+extern void bitmap_vector_ones (sbitmap *, unsigned int);
+
+extern bool bitmap_ior_and_compl (sbitmap, const_sbitmap,
const_sbitmap, const_sbitmap);
-extern void sbitmap_difference (sbitmap, const_sbitmap, const_sbitmap);
-extern void sbitmap_not (sbitmap, const_sbitmap);
-extern void sbitmap_a_or_b_and_c (sbitmap, const_sbitmap,
- const_sbitmap, const_sbitmap);
-extern bool sbitmap_a_or_b_and_c_cg (sbitmap, const_sbitmap,
+extern void bitmap_and_compl (sbitmap, const_sbitmap, const_sbitmap);
+extern void bitmap_not (sbitmap, const_sbitmap);
+extern bool bitmap_or_and (sbitmap, const_sbitmap,
const_sbitmap, const_sbitmap);
-extern void sbitmap_a_and_b_or_c (sbitmap, const_sbitmap,
- const_sbitmap, const_sbitmap);
-extern bool sbitmap_a_and_b_or_c_cg (sbitmap, const_sbitmap,
+extern bool bitmap_and_or (sbitmap, const_sbitmap,
const_sbitmap, const_sbitmap);
-extern bool sbitmap_any_common_bits (const_sbitmap, const_sbitmap);
-extern void sbitmap_a_and_b (sbitmap, const_sbitmap, const_sbitmap);
-extern bool sbitmap_a_and_b_cg (sbitmap, const_sbitmap, const_sbitmap);
-extern void sbitmap_a_or_b (sbitmap, const_sbitmap, const_sbitmap);
-extern bool sbitmap_a_or_b_cg (sbitmap, const_sbitmap, const_sbitmap);
-extern void sbitmap_a_xor_b (sbitmap, const_sbitmap, const_sbitmap);
-extern bool sbitmap_a_xor_b_cg (sbitmap, const_sbitmap, const_sbitmap);
-extern bool sbitmap_a_subset_b_p (const_sbitmap, const_sbitmap);
-
-extern int sbitmap_first_set_bit (const_sbitmap);
-extern int sbitmap_last_set_bit (const_sbitmap);
-
-extern void debug_sbitmap (const_sbitmap);
+extern bool bitmap_intersect_p (const_sbitmap, const_sbitmap);
+extern bool bitmap_and (sbitmap, const_sbitmap, const_sbitmap);
+extern bool bitmap_ior (sbitmap, const_sbitmap, const_sbitmap);
+extern bool bitmap_xor (sbitmap, const_sbitmap, const_sbitmap);
+extern bool bitmap_subset_p (const_sbitmap, const_sbitmap);
+
+extern int bitmap_first_set_bit (const_sbitmap);
+extern int bitmap_last_set_bit (const_sbitmap);
+
+extern void debug_bitmap (const_sbitmap);
extern sbitmap sbitmap_realloc (sbitmap, unsigned int);
extern unsigned long sbitmap_popcount (const_sbitmap, unsigned long);
extern void sbitmap_verify_popcount (const_sbitmap);
stack = XNEWVEC (edge_iterator, n_edges);
inner = sbitmap_alloc (last_basic_block);
- sbitmap_ones (inner);
+ bitmap_ones (inner);
header = sbitmap_alloc (last_basic_block);
- sbitmap_zero (header);
+ bitmap_clear (header);
in_queue = sbitmap_alloc (last_basic_block);
- sbitmap_zero (in_queue);
+ bitmap_clear (in_queue);
in_stack = sbitmap_alloc (last_basic_block);
- sbitmap_zero (in_stack);
+ bitmap_clear (in_stack);
for (i = 0; i < last_basic_block; i++)
max_hdr[i] = -1;
{
degree1 = XNEWVEC (int, last_basic_block);
extended_rgn_header = sbitmap_alloc (last_basic_block);
- sbitmap_zero (extended_rgn_header);
+ bitmap_clear (extended_rgn_header);
}
/* Find blocks which are inner loop headers. We still have non-reducible
{
free (degree1);
- sbitmap_a_or_b (header, header, extended_rgn_header);
+ bitmap_ior (header, header, extended_rgn_header);
sbitmap_free (extended_rgn_header);
extend_rgns (degree, &idx, header, max_hdr);
prob[bb] = 0;
/* Initialize dom[bb] to '111..1'. */
- sbitmap_ones (dom[bb]);
+ bitmap_ones (dom[bb]);
FOR_EACH_EDGE (in_edge, in_ei, BASIC_BLOCK (BB_TO_BLOCK (bb))->preds)
{
continue;
pred_bb = BLOCK_TO_BB (in_edge->src->index);
- sbitmap_a_and_b (dom[bb], dom[bb], dom[pred_bb]);
- sbitmap_a_or_b (ancestor_edges[bb],
+ bitmap_and (dom[bb], dom[bb], dom[pred_bb]);
+ bitmap_ior (ancestor_edges[bb],
ancestor_edges[bb], ancestor_edges[pred_bb]);
SET_BIT (ancestor_edges[bb], EDGE_TO_BIT (in_edge));
- sbitmap_a_or_b (pot_split[bb], pot_split[bb], pot_split[pred_bb]);
+ bitmap_ior (pot_split[bb], pot_split[bb], pot_split[pred_bb]);
FOR_EACH_EDGE (out_edge, out_ei, in_edge->src->succs)
SET_BIT (pot_split[bb], EDGE_TO_BIT (out_edge));
}
SET_BIT (dom[bb], bb);
- sbitmap_difference (pot_split[bb], pot_split[bb], ancestor_edges[bb]);
+ bitmap_and_compl (pot_split[bb], pot_split[bb], ancestor_edges[bb]);
if (sched_verbose >= 2)
fprintf (sched_dump, ";; bb_prob(%d, %d) = %3d\n", bb, BB_TO_BLOCK (bb),
split_edges (int bb_src, int bb_trg, edgelst *bl)
{
sbitmap src = sbitmap_alloc (SBITMAP_SIZE (pot_split[bb_src]));
- sbitmap_copy (src, pot_split[bb_src]);
+ bitmap_copy (src, pot_split[bb_src]);
- sbitmap_difference (src, src, pot_split[bb_trg]);
+ bitmap_and_compl (src, src, pot_split[bb_trg]);
extract_edgelst (src, bl);
sbitmap_free (src);
}
overrunning the end of the bblst_table. */
update_idx = 0;
- sbitmap_zero (visited);
+ bitmap_clear (visited);
for (j = 0; j < el.nr_members; j++)
{
block = el.first_member[j]->src;
/* Initialize bitmap used in add_branch_dependences. */
insn_referenced = sbitmap_alloc (sched_max_luid);
- sbitmap_zero (insn_referenced);
+ bitmap_clear (insn_referenced);
/* Compute backward dependencies. */
for (bb = 0; bb < current_nr_blocks; bb++)
prob = XNEWVEC (int, current_nr_blocks);
dom = sbitmap_vector_alloc (current_nr_blocks, current_nr_blocks);
- sbitmap_vector_zero (dom, current_nr_blocks);
+ bitmap_vector_clear (dom, current_nr_blocks);
/* Use ->aux to implement EDGE_TO_BIT mapping. */
rgn_nr_edges = 0;
/* Split edges. */
pot_split = sbitmap_vector_alloc (current_nr_blocks, rgn_nr_edges);
- sbitmap_vector_zero (pot_split, current_nr_blocks);
+ bitmap_vector_clear (pot_split, current_nr_blocks);
ancestor_edges = sbitmap_vector_alloc (current_nr_blocks, rgn_nr_edges);
- sbitmap_vector_zero (ancestor_edges, current_nr_blocks);
+ bitmap_vector_clear (ancestor_edges, current_nr_blocks);
/* Compute probabilities, dominators, split_edges. */
for (bb = 0; bb < current_nr_blocks; bb++)
current_loop_nest = NULL;
bbs_in_loop_rgns = sbitmap_alloc (last_basic_block);
- sbitmap_zero (bbs_in_loop_rgns);
+ bitmap_clear (bbs_in_loop_rgns);
recompute_rev_top_order ();
}
if (blocks_to_reschedule)
{
- sbitmap_ones (visited_bbs);
+ bitmap_ones (visited_bbs);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_reschedule, 0, bbi, bi)
{
gcc_assert (BLOCK_TO_BB (bbi) < current_nr_blocks);
}
else
{
- sbitmap_zero (visited_bbs);
+ bitmap_clear (visited_bbs);
from = EBB_FIRST_BB (0);
}
sp = 0;
ei = ei_start (bb->succs);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
while (1)
/* Build the gen_vector. This is any store in the table which is not killed
by aliasing later in its block. */
st_avloc = sbitmap_vector_alloc (last_basic_block, num_stores);
- sbitmap_vector_zero (st_avloc, last_basic_block);
+ bitmap_vector_clear (st_avloc, last_basic_block);
st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
- sbitmap_vector_zero (st_antloc, last_basic_block);
+ bitmap_vector_clear (st_antloc, last_basic_block);
for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
{
}
st_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
- sbitmap_vector_zero (st_kill, last_basic_block);
+ bitmap_vector_clear (st_kill, last_basic_block);
st_transp = sbitmap_vector_alloc (last_basic_block, num_stores);
- sbitmap_vector_zero (st_transp, last_basic_block);
+ bitmap_vector_clear (st_transp, last_basic_block);
regs_set_in_block = XNEWVEC (int, max_gcse_regno);
FOR_EACH_BB (bb)
if (dump_file)
{
- dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
- dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
+ dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
}
}
-/* Inspired from sbitmap_a_or_b_and_c_cg function in sbitmap.c. */
+/* Inspired from bitmap_or_and function in sbitmap.c. */
/* { dg-do run } */
/* { dg-options "-O2 -fmodulo-sched -fmodulo-sched-allow-regmoves -fdump-rtl-sms" } */
/* { dg-options "-O2 -fmodulo-sched -fmodulo-sched-allow-regmoves -fdump-rtl-sms --param sms-min-sc=1" { target powerpc*-*-* } } */
/* Create an oversized sbitmap to reduce the chance that we need to
resize it. */
bb_seen = sbitmap_alloc (last_basic_block * 2);
- sbitmap_zero (bb_seen);
+ bitmap_clear (bb_seen);
initialize_original_copy_tables ();
if (profile_info && flag_branch_probabilities)
r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
lp_reachable
= sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
- sbitmap_zero (r_reachable);
- sbitmap_zero (lp_reachable);
+ bitmap_clear (r_reachable);
+ bitmap_clear (lp_reachable);
FOR_EACH_BB (bb)
{
fprintf (dump_file, "Before removal of unreachable regions:\n");
dump_eh_tree (dump_file, cfun);
fprintf (dump_file, "Reachable regions: ");
- dump_sbitmap_file (dump_file, r_reachable);
+ dump_bitmap_file (dump_file, r_reachable);
fprintf (dump_file, "Reachable landing pads: ");
- dump_sbitmap_file (dump_file, lp_reachable);
+ dump_bitmap_file (dump_file, lp_reachable);
}
for (r_nr = 1;
basic_block bb;
r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
- sbitmap_zero (r_reachable);
+ bitmap_clear (r_reachable);
FOR_EACH_BB (bb)
{
mark_def_sites will add to this set those blocks that the renamer
should process. */
interesting_blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (interesting_blocks);
+ bitmap_clear (interesting_blocks);
/* Initialize dominance frontier. */
dfs = XNEWVEC (bitmap_head, last_basic_block);
if (!need_ssa_update_p (cfun))
return;
- if (new_ssa_names && sbitmap_first_set_bit (new_ssa_names) >= 0)
+ if (new_ssa_names && bitmap_first_set_bit (new_ssa_names) >= 0)
{
sbitmap_iterator sbi;
add_new_name_mapping are typically done after creating new SSA
names, so we'll need to reallocate these arrays. */
old_ssa_names = sbitmap_alloc (num_ssa_names + NAME_SETS_GROWTH_FACTOR);
- sbitmap_zero (old_ssa_names);
+ bitmap_clear (old_ssa_names);
new_ssa_names = sbitmap_alloc (num_ssa_names + NAME_SETS_GROWTH_FACTOR);
- sbitmap_zero (new_ssa_names);
+ bitmap_clear (new_ssa_names);
bitmap_obstack_initialize (&update_ssa_obstack);
/* If we only need to update virtuals, remove all the mappings for
real names before proceeding. The caller is responsible for
having dealt with the name mappings before calling update_ssa. */
- sbitmap_zero (old_ssa_names);
- sbitmap_zero (new_ssa_names);
+ bitmap_clear (old_ssa_names);
+ bitmap_clear (new_ssa_names);
}
gcc_assert (update_ssa_initialized_fn == cfun);
/* If there are names defined in the replacement table, prepare
definition and use sites for all the names in NEW_SSA_NAMES and
OLD_SSA_NAMES. */
- if (sbitmap_first_set_bit (new_ssa_names) >= 0)
+ if (bitmap_first_set_bit (new_ssa_names) >= 0)
{
prepare_names_to_update (insert_phi_p);
/* If all the names in NEW_SSA_NAMES had been marked for
removal, and there are no symbols to rename, then there's
nothing else to do. */
- if (sbitmap_first_set_bit (new_ssa_names) < 0
+ if (bitmap_first_set_bit (new_ssa_names) < 0
&& !cfun->gimple_df->ssa_renaming_needed)
goto done;
}
bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
compute_dominance_frontiers (dfs);
- if (sbitmap_first_set_bit (old_ssa_names) >= 0)
+ if (bitmap_first_set_bit (old_ssa_names) >= 0)
{
sbitmap_iterator sbi;
gain any new members). Copy OLD_SSA_NAMES to a temporary
for traversal. */
sbitmap tmp = sbitmap_alloc (SBITMAP_SIZE (old_ssa_names));
- sbitmap_copy (tmp, old_ssa_names);
+ bitmap_copy (tmp, old_ssa_names);
EXECUTE_IF_SET_IN_SBITMAP (tmp, 0, i, sbi)
insert_updated_phi_nodes_for (ssa_name (i), dfs, blocks_to_update,
update_flags);
/* Now start the renaming process at START_BB. */
interesting_blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (interesting_blocks);
+ bitmap_clear (interesting_blocks);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_update, 0, i, bi)
SET_BIT (interesting_blocks, i);
{
int part;
- sbitmap_zero (g->visited);
+ bitmap_clear (g->visited);
VEC_truncate (int, g->stack, 0);
FOR_EACH_VEC_ELT (int, g->nodes, x, part)
elim_forward (g, part);
}
- sbitmap_zero (g->visited);
+ bitmap_clear (g->visited);
while (VEC_length (int, g->stack) > 0)
{
x = VEC_pop (int, g->stack);
control_dependence_map[i] = BITMAP_ALLOC (NULL);
last_stmt_necessary = sbitmap_alloc (last_basic_block);
- sbitmap_zero (last_stmt_necessary);
+ bitmap_clear (last_stmt_necessary);
bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
- sbitmap_zero (bb_contains_live_stmts);
+ bitmap_clear (bb_contains_live_stmts);
}
processed = sbitmap_alloc (num_ssa_names + 1);
- sbitmap_zero (processed);
+ bitmap_clear (processed);
worklist = VEC_alloc (gimple, heap, 64);
cfg_altered = false;
timevar_pop (TV_CONTROL_DEPENDENCES);
visited_control_parents = sbitmap_alloc (last_basic_block);
- sbitmap_zero (visited_control_parents);
+ bitmap_clear (visited_control_parents);
mark_dfs_back_edges ();
}
}
gimple_purge_all_dead_eh_edges (need_eh_cleanup);
- bitmap_zero (need_eh_cleanup);
+ bitmap_clear (need_eh_cleanup);
}
statistics_counter_event (cfun, "Redundant expressions eliminated",
sbitmap visited = sbitmap_alloc (last_basic_block + 1);
bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
/* Visit all the blocks in reverse order and propagate live on entry values
into the predecessors blocks. */
{
gcc_checking_assert (&live->livein[p1] && &live->livein[p2]);
bitmap_ior_into (&live->livein[p1], &live->livein[p2]);
- bitmap_zero (&live->livein[p2]);
+ bitmap_clear (&live->livein[p2]);
}
bitmap_obstack_initialize (&lim_bitmap_obstack);
- sbitmap_zero (contains_call);
+ bitmap_clear (contains_call);
FOR_EACH_BB (bb)
{
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
initialize_original_copy_tables ();
wont_exit = sbitmap_alloc (n_unroll + 1);
- sbitmap_ones (wont_exit);
+ bitmap_ones (wont_exit);
RESET_BIT (wont_exit, 0);
if (!gimple_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
HOST_WIDE_INT i;
valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
- sbitmap_zero (valid_mult);
+ bitmap_clear (valid_mult);
addr = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
{
/* Unroll the loop and remove the exits in all iterations except for the
last one. */
wont_exit = sbitmap_alloc (factor);
- sbitmap_ones (wont_exit);
+ bitmap_ones (wont_exit);
RESET_BIT (wont_exit, factor - 1);
ok = gimple_duplicate_loop_to_header_edge
#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
- sbitmap_zero (visited);
+ bitmap_clear (visited);
MARK_VISITED (ENTRY_BLOCK_PTR);
FOR_EACH_BB (x)
/* If any predecessor edges are abnormal, we punt, so antic_in is empty.
We pre-build the map of blocks with incoming abnormal edges here. */
has_abnormal_preds = sbitmap_alloc (last_basic_block);
- sbitmap_zero (has_abnormal_preds);
+ bitmap_clear (has_abnormal_preds);
FOR_ALL_BB (block)
{
BB_VISITED (EXIT_BLOCK_PTR) = 1;
changed_blocks = sbitmap_alloc (last_basic_block + 1);
- sbitmap_ones (changed_blocks);
+ bitmap_ones (changed_blocks);
while (changed)
{
if (dump_file && (dump_flags & TDF_DETAILS))
if (do_partial_partial)
{
- sbitmap_ones (changed_blocks);
+ bitmap_ones (changed_blocks);
mark_dfs_back_edges ();
num_iterations = 0;
changed = true;
varying_ssa_edges = VEC_alloc (gimple, gc, 20);
executable_blocks = sbitmap_alloc (last_basic_block);
- sbitmap_zero (executable_blocks);
+ bitmap_clear (executable_blocks);
bb_in_list = sbitmap_alloc (last_basic_block);
- sbitmap_zero (bb_in_list);
+ bitmap_clear (bb_in_list);
if (dump_file && (dump_flags & TDF_DETAILS))
dump_immediate_uses (dump_file);
/* Build a list of candidates to process. */
candidates = sbitmap_alloc (length);
- sbitmap_zero (candidates);
+ bitmap_clear (candidates);
nr_candidates = 0;
FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe1)
{
fprintf (dump_file, "searching for un-distribute opportunities ");
print_generic_expr (dump_file,
VEC_index (operand_entry_t, *ops,
- sbitmap_first_set_bit (candidates))->op, 0);
+ bitmap_first_set_bit (candidates))->op, 0);
fprintf (dump_file, " %d\n", nr_candidates);
}
/* Now collect the operands in the outer chain that contain
the common operand in their inner chain. */
- sbitmap_zero (candidates2);
+ bitmap_clear (candidates2);
nr_candidates2 = 0;
EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
{
{
operand_entry_t oe1, oe2;
gimple prod;
- int first = sbitmap_first_set_bit (candidates2);
+ int first = bitmap_first_set_bit (candidates2);
/* Build the new addition chain. */
oe1 = VEC_index (operand_entry_t, *ops, first);
graph->eq_rep = XNEWVEC (int, graph->size);
graph->direct_nodes = sbitmap_alloc (graph->size);
graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
- sbitmap_zero (graph->direct_nodes);
+ bitmap_clear (graph->direct_nodes);
for (j = 0; j < FIRST_REF_NODE; j++)
{
size_t size = graph->size;
struct topo_info *ti = XNEW (struct topo_info);
ti->visited = sbitmap_alloc (size);
- sbitmap_zero (ti->visited);
+ bitmap_clear (ti->visited);
ti->topo_order = VEC_alloc (unsigned, heap, 1);
return ti;
}
si->current_index = 0;
si->visited = sbitmap_alloc (size);
- sbitmap_zero (si->visited);
+ bitmap_clear (si->visited);
si->deleted = sbitmap_alloc (size);
- sbitmap_zero (si->deleted);
+ bitmap_clear (si->deleted);
si->node_mapping = XNEWVEC (unsigned int, size);
si->dfs = XCNEWVEC (unsigned int, size);
if (!TEST_BIT (si->visited, si->node_mapping[i]))
condense_visit (graph, si, si->node_mapping[i]);
- sbitmap_zero (si->visited);
+ bitmap_clear (si->visited);
/* Actually the label the nodes for pointer equivalences */
for (i = 0; i < FIRST_REF_NODE; i++)
if (!TEST_BIT (si->visited, si->node_mapping[i]))
return false;
visited = sbitmap_alloc (last_basic_block);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
ret = true;
FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
/* Check that the loads in the first sequence are different and there
are no gaps between them. */
load_index = sbitmap_alloc (group_size);
- sbitmap_zero (load_index);
+ bitmap_clear (load_index);
for (k = 0; k < group_size; k++)
{
first_group_load_index = VEC_index (int, load_permutation, k);
supported = true;
load_index = sbitmap_alloc (group_size);
- sbitmap_zero (load_index);
+ bitmap_clear (load_index);
for (j = 0; j < group_size; j++)
{
for (i = j * group_size, k = 0;
if (!live[rpo[i]])
{
live[rpo[i]] = sbitmap_alloc (num_ssa_names);
- sbitmap_zero (live[rpo[i]]);
+ bitmap_clear (live[rpo[i]]);
}
/* Process BB and update the live information with uses in
need_asserts |= find_assert_locations_1 (bb, live[rpo[i]]);
/* Merge liveness into the predecessor blocks and free it. */
- if (!sbitmap_empty_p (live[rpo[i]]))
+ if (!bitmap_empty_p (live[rpo[i]]))
{
int pred_rpo = i;
FOR_EACH_EDGE (e, ei, bb->preds)
if (!live[pred])
{
live[pred] = sbitmap_alloc (num_ssa_names);
- sbitmap_zero (live[pred]);
+ bitmap_clear (live[pred]);
}
- sbitmap_a_or_b (live[pred], live[pred], live[rpo[i]]);
+ bitmap_ior (live[pred], live[pred], live[rpo[i]]);
if (bb_rpo[pred] < pred_rpo)
pred_rpo = bb_rpo[pred];
visited = sbitmap_alloc (last_basic_block);
in_worklist = sbitmap_alloc (last_basic_block);
in_pending = sbitmap_alloc (last_basic_block);
- sbitmap_zero (in_worklist);
+ bitmap_clear (in_worklist);
FOR_EACH_BB (bb)
fibheap_insert (pending, bb_order[bb->index], bb);
- sbitmap_ones (in_pending);
+ bitmap_ones (in_pending);
while (success && !fibheap_empty (pending))
{
in_pending = in_worklist;
in_worklist = sbitmap_swap;
- sbitmap_zero (visited);
+ bitmap_clear (visited);
while (!fibheap_empty (worklist))
{