* bitmap.c (bitmap_print): Make bitno unsigned.
* bt-load.c (clear_btr_from_live_range,
btr_def_live_range): Likewise.
* caller-save.c (save_call_clobbered_regs): Likewise.
* cfganal.c (compute_dominance_frontiers_1): Likewise.
* cfgcleanup.c (thread_jump): Likewise.
* cfgrtl.c (safe_insert_insn_on_edge): Likewise.
* conflict.c (conflict_graph_compute): Likewise.
* ddg.c (add_deps_for_use): Likewise.
* df.c (df_refs_update): Likewise.
* except.c (remove_eh_handler): Likewise.
* flow.c (verify_local_live_at_start, update_life_info,
initialize_uninitialized_subregs, propagate_one_insn,
free_propagate_block_info, propagate_block, find_use_as_address,
reg_set_to_hard_reg_set): Likewise.
* gcse.c (clear_modify_mem_tables): Likewise.
* global.c (global_conflicts, build_insn_chain): Likewise.
* ifcvt.c (dead_or_predicable): Likewise.
* local-alloc.c (update_equiv_regs): Likewise.
* loop.c (load_mems): Likewise.
* ra-build.c (livethrough_conflicts_bb, conflicts_between_webs):
Likewise.
* ra-rewrite.c (reloads_to_loads, rewrite_program2, actual_spill):
Likewise.
* reload1.c (order_regs_for_reload, finish_spills): Likewise.
* sched-deps.c (sched_analyze_insn, free_deps): Likewise.
* sched-rgn.c (propagate_deps): Likewise.
* tree-cfg.c (tree_purge_all_dead_eh_edges): Likewise.
* tree-dfa.c (dump_dfa_stats): Likewise.
* tree-into-ssa.c (compute_global_livein, insert_phi_nodes,
insert_phi_nodes_for, debug_def_blocks_r, invalidate_name_tags):
Likewise.
* tree-outof-ssa.c (coalesce_ssa_name, coalesce_vars,
free_temp_expr_table, find_replaceable_exprs): Likewise.
* tree-sra.c (scan_function, scalarize_parms): Likewise.
* tree-ssa-alias.c (init_alias_info,
compute_points_to_and_addr_escape,
compute_flow_sensitive_aliasing, maybe_create_global_var): Likewise.
* tree-ssa-dce.c (mark_control_dependent_edges_necessary): Likewise.
* tree-ssa-live.c (new_tree_live_info, live_worklist,
calculate_live_on_entry, calculate_live_on_exit, compare_pairs,
sort_coalesce_list, build_tree_conflict_graph, dump_live_info): Likewise.
* tree-ssa-loop-manip.c (add_exit_phis_var): Likewise.
* tree-ssa-operands.c (get_asm_expr_operands, add_call_clobber_ops,
add_call_read_ops): Likewise.
* tree-ssa-pre.c (bitmap_print_value_set, insert_aux): Likewise.
* tree-ssa-live.h (num_var_partitions): Return unsigned.
From-SVN: r90053
+2004-11-04 Nathan Sidwell <nathan@codesourcery.com>
+
+ * bitmap.c (bitmap_print): Make bitno unsigned.
+ * bt-load.c (clear_btr_from_live_range,
+ btr_def_live_range): Likewise.
+ * caller-save.c (save_call_clobbered_regs): Likewise.
+ * cfganal.c (compute_dominance_frontiers_1): Likewise.
+ * cfgcleanup.c (thread_jump): Likewise.
+ * cfgrtl.c (safe_insert_insn_on_edge): Likewise.
+ * conflict.c (conflict_graph_compute): Likewise.
+ * ddg.c (add_deps_for_use): Likewise.
+ * df.c (df_refs_update): Likewise.
+ * except.c (remove_eh_handler): Likewise.
+ * flow.c (verify_local_live_at_start, update_life_info,
+ initialize_uninitialized_subregs, propagate_one_insn,
+ free_propagate_block_info, propagate_block, find_use_as_address,
+ reg_set_to_hard_reg_set): Likewise.
+ * gcse.c (clear_modify_mem_tables): Likewise.
+ * global.c (global_conflicts, build_insn_chain): Likewise.
+ * ifcvt.c (dead_or_predicable): Likewise.
+ * local-alloc.c (update_equiv_regs): Likewise.
+ * loop.c (load_mems): Likewise.
+ * ra-build.c (livethrough_conflicts_bb, conflicts_between_webs):
+ Likewise.
+ * ra-rewrite.c (reloads_to_loads, rewrite_program2, actual_spill):
+ Likewise.
+ * reload1.c (order_regs_for_reload, finish_spills): Likewise.
+ * sched-deps.c (sched_analyze_insn, free_deps): Likewise.
+ * sched-rgn.c (propagate_deps): Likewise.
+ * tree-cfg.c (tree_purge_all_dead_eh_edges): Likewise.
+ * tree-dfa.c (dump_dfa_stats): Likewise.
+ * tree-into-ssa.c (compute_global_livein, insert_phi_nodes,
+ insert_phi_nodes_for, debug_def_blocks_r, invalidate_name_tags):
+ Likewise.
+ * tree-outof-ssa.c (coalesce_ssa_name, coalesce_vars,
+ free_temp_expr_table, find_replaceable_exprs): Likewise.
+ * tree-sra.c (scan_function, scalarize_parms): Likewise.
+ * tree-ssa-alias.c (init_alias_info,
+ compute_points_to_and_addr_escape,
+ compute_flow_sensitive_aliasing, maybe_create_global_var): Likewise.
+ * tree-ssa-dce.c (mark_control_dependent_edges_necessary): Likewise.
+ * tree-ssa-live.c (new_tree_live_info, live_worklist,
+ calculate_live_on_entry, calculate_live_on_exit, compare_pairs,
+ sort_coalesce_list, build_tree_conflict_graph, dump_live_info): Likewise.
+ * tree-ssa-loop-manip.c (add_exit_phis_var): Likewise.
+ * tree-ssa-operands.c (get_asm_expr_operands, add_call_clobber_ops,
+ add_call_read_ops): Likewise.
+ * tree-ssa-pre.c (bitmap_print_value_set, insert_aux): Likewise.
+ * tree-ssa-live.h (num_var_partitions): Return unsigned.
+
2004-11-03 Dorit Naishlos <dorit@il.ibm.com>
PR tree-optimization/18009
bitmap_print (FILE *file, bitmap head, const char *prefix, const char *suffix)
{
const char *comma = "";
- int i;
+ unsigned i;
bitmap_iterator bi;
fputs (prefix, file);
static void
clear_btr_from_live_range (btr_def def)
{
- int bb;
+ unsigned bb;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
static void
add_btr_to_live_range (btr_def def)
{
- int bb;
+ unsigned bb;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
def->live_range = BITMAP_XMALLOC ();
bitmap_set_bit (def->live_range, def->bb->index);
- if (flag_btr_bb_exclusive)
- COPY_HARD_REG_SET (*btrs_live_in_range, btrs_live[def->bb->index]);
- else
- COPY_HARD_REG_SET (*btrs_live_in_range,
- btrs_live_at_end[def->bb->index]);
+ COPY_HARD_REG_SET (*btrs_live_in_range,
+ (flag_btr_bb_exclusive
+ ? btrs_live : btrs_live_at_end)[def->bb->index]);
for (user = def->uses; user != NULL; user = user->next)
augment_live_range (def->live_range, btrs_live_in_range,
the set of target registers live over it, because migration
of other PT instructions may have affected it.
*/
- int bb;
- int def_bb = def->bb->index;
+ unsigned bb;
+ unsigned def_bb = flag_btr_bb_exclusive ? -1 : def->bb->index;
bitmap_iterator bi;
CLEAR_HARD_REG_SET (*btrs_live_in_range);
- if (flag_btr_bb_exclusive)
+ EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
{
- EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
- {
- IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[bb]);
- }
- }
- else
- {
- EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
- {
- IOR_HARD_REG_SET (*btrs_live_in_range,
- (def_bb == bb
- ? btrs_live_at_end : btrs_live) [bb]);
- }
+ IOR_HARD_REG_SET (*btrs_live_in_range,
+ (def_bb == bb
+ ? btrs_live_at_end : btrs_live) [bb]);
}
}
if (!def->other_btr_uses_before_def &&
if (code == CALL_INSN && ! find_reg_note (insn, REG_NORETURN, NULL))
{
- int regno;
+ unsigned regno;
HARD_REG_SET hard_regs_to_save;
reg_set_iterator rsi;
c;
c = next_dom_son (CDI_DOMINATORS, c))
{
- int x;
+ unsigned x;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (frontiers[c->index], 0, x, bi)
rtx set1, set2, cond1, cond2, insn;
enum rtx_code code1, code2, reversed_code2;
bool reverse1 = false;
- int i;
+ unsigned i;
regset nonequal;
bool failed = false;
reg_set_iterator rsi;
cselib_init (false);
/* First process all values computed in the source basic block. */
- for (insn = NEXT_INSN (BB_HEAD (e->src)); insn != NEXT_INSN (BB_END (e->src));
+ for (insn = NEXT_INSN (BB_HEAD (e->src));
+ insn != NEXT_INSN (BB_END (e->src));
insn = NEXT_INSN (insn))
if (INSN_P (insn))
cselib_process_insn (insn);
processing as if it were same basic block.
Our goal is to prove that whole block is an NOOP. */
- for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b)) && !failed;
+ for (insn = NEXT_INSN (BB_HEAD (b));
+ insn != NEXT_INSN (BB_END (b)) && !failed;
insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
if (GET_CODE (pat) == PARALLEL)
{
- for (i = 0; i < XVECLEN (pat, 0); i++)
+ for (i = 0; i < (unsigned)XVECLEN (pat, 0); i++)
failed |= mark_effect (XVECEXP (pat, 0, i), nonequal);
}
else
regset_head killed_head;
regset killed = INITIALIZE_REG_SET (killed_head);
rtx save_regs = NULL_RTX;
- int regno, noccmode;
+ unsigned regno;
+ int noccmode;
enum machine_mode mode;
reg_set_iterator rsi;
insn = BB_END (bb);
for (insn = BB_END (bb); insn != head; insn = PREV_INSN (insn))
{
- int born_reg;
- int live_reg;
+ unsigned born_reg;
+ unsigned live_reg;
rtx link;
/* Are we interested in this insn? */
static void
build_inter_loop_deps (ddg_ptr g, struct df *df)
{
- int rd_num, u_num;
+ unsigned rd_num, u_num;
struct bb_info *bb_info;
bitmap_iterator bi;
df_refs_update (struct df *df, bitmap blocks)
{
basic_block bb;
- int count = 0, bbno;
+ unsigned count = 0, bbno;
df->n_regs = max_reg_num ();
if (df->n_regs >= df->reg_size)
cfun->eh->region_array[region->region_number] = outer;
if (region->aka)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
}
else
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
/* Find the set of changed registers. */
{
regset tmp;
regset_head tmp_head;
- int i;
+ unsigned i;
int stabilized_prop_flags = prop_flags;
basic_block bb;
{
rtx insn;
edge e;
- int reg, did_something = 0;
+ unsigned reg, did_something = 0;
find_regno_partial_param param;
edge_iterator ei;
int insn_is_dead = 0;
int libcall_is_dead = 0;
rtx note;
- int i;
+ unsigned i;
if (! INSN_P (insn))
return prev;
if (pbi->flags & PROP_REG_INFO)
{
int num = pbi->insn_num;
- int i;
+ unsigned i;
reg_set_iterator rsi;
EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
if (flags & PROP_REG_INFO)
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
/* Process the regs live at the end of the block.
void
dump_regset (regset r, FILE *outf)
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
if (r == NULL)
void
reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
static void
clear_modify_mem_tables (void)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
static void
global_conflicts (void)
{
- int i;
+ unsigned i;
basic_block b;
rtx insn;
int *block_start_allocnos;
if (first == BB_HEAD (b))
{
- int i;
+ unsigned i;
bitmap_iterator bi;
CLEAR_REG_SET (live_relevant_regs);
regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
regset merge_set, tmp, test_live, test_set;
struct propagate_block_info *pbi;
- int i, fail = 0;
+ unsigned i, fail = 0;
bitmap_iterator bi;
/* Check for no calls or trapping operations. */
/* Clear all dead REGNOs from all basic block's live info. */
if (clear_regnos)
{
- int j;
+ unsigned j;
+
if (clear_regnos > 8)
{
FOR_EACH_BB (bb)
cselib_val *e = cselib_lookup (mem, VOIDmode, 0);
rtx set;
rtx best = mem;
- int j;
+ unsigned j;
struct elt_loc_list *const_equiv = 0;
reg_set_iterator rsi;
struct ra_bb_info *info = (struct ra_bb_info *) bb->aux;
rtx insn;
bitmap all_defs;
- int first, use_id;
+ int first;
+ unsigned use_id;
unsigned int deaths = 0;
unsigned int contains_call = 0;
for (; cl; cl = cl->next)
if (cl->conflicts)
{
- int j;
+ unsigned j;
struct web *web1 = find_subweb_2 (supweb1, cl->size_word);
bitmap_iterator bi;
struct web *web = ref2web[DF_REF_ID (refs[n])];
struct web *supweb = find_web_for_subweb (web);
int is_death;
- int j;
+ unsigned j;
+
/* Only emit reloads when entering their interference
region. A use of a spilled web never opens an
interference region, independent of it's color. */
{
basic_block last_bb = NULL;
rtx last_block_insn;
- int i, j;
+ unsigned i, j;
bitmap_iterator bi;
if (!INSN_P (insn))
CLEAR_HARD_REG_SET (cum_colors);
FOR_EACH_EDGE (e, ei, bb->preds)
{
- int j;
+ unsigned j;
if (num >= 5)
break;
void
actual_spill (void)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
bitmap new_deaths = BITMAP_XMALLOC ();
static void
order_regs_for_reload (struct insn_chain *chain)
{
- int i;
+ unsigned i;
HARD_REG_SET used_by_pseudos;
HARD_REG_SET used_by_pseudos2;
reg_set_iterator rsi;
{
struct insn_chain *chain;
int something_changed = 0;
- int i;
+ unsigned i;
reg_set_iterator rsi;
/* Build the spill_regs array for the function. */
and call retry_global_alloc.
We change spill_pseudos here to only contain pseudos that did not
get a new hard register. */
- for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
+ for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
if (reg_old_renumber[i] != reg_renumber[i])
{
HARD_REG_SET forbidden;
}
/* Let alter_reg modify the reg rtx's for the modified pseudos. */
- for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
+ for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
{
int regno = reg_renumber[i];
if (reg_old_renumber[i] == regno)
{
RTX_CODE code = GET_CODE (x);
rtx link;
- int i;
+ unsigned i;
reg_set_iterator rsi;
if (code == COND_EXEC)
}
else if (code == PARALLEL)
{
- int i;
- for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
+ for (i = XVECLEN (x, 0); i--;)
{
rtx sub = XVECEXP (x, 0, i);
code = GET_CODE (sub);
}
}
- for (i = 0; i < deps->max_reg; i++)
+ for (i = 0; i < (unsigned)deps->max_reg; i++)
{
struct deps_reg *reg_last = &deps->reg_last[i];
reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
void
free_deps (struct deps *deps)
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
free_INSN_LIST_list (&deps->pending_read_insns);
FOR_EACH_EDGE (e, ei, block->succs)
{
struct deps *succ_deps;
- int reg;
+ unsigned reg;
reg_set_iterator rsi;
/* Only bbs "below" bb, in the same region, are interesting. */
tree_purge_all_dead_eh_edges (bitmap blocks)
{
bool changed = false;
- size_t i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
size = num_referenced_vars * sizeof (tree);
total += size;
- fprintf (file, fmt_str_1, "Referenced variables", num_referenced_vars,
+ fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
SCALE (size), LABEL (size));
size = dfa_stats.num_stmt_anns * sizeof (struct stmt_ann_d);
compute_global_livein (bitmap livein, bitmap def_blocks)
{
basic_block bb, *worklist, *tos;
- int i;
+ unsigned i;
bitmap_iterator bi;
tos = worklist
static void
insert_phi_nodes (bitmap *dfs, bitmap names_to_rename)
{
- size_t i;
+ unsigned i;
varray_type work_stack;
bitmap_iterator bi;
{
struct def_blocks_d *def_map;
bitmap phi_insertion_points;
- int bb_index;
+ unsigned bb_index;
edge e;
tree phi;
basic_block bb;
We now always use fully pruned SSA form. */
while (VARRAY_ACTIVE_SIZE (*work_stack) > 0)
{
- int dfs_index;
+ unsigned dfs_index;
bitmap_iterator bi;
bb = VARRAY_TOP_GENERIC_PTR_NOGC (*work_stack);
static int
debug_def_blocks_r (void **slot, void *data ATTRIBUTE_UNUSED)
{
- unsigned long i;
struct def_blocks_d *db_p = (struct def_blocks_d *) *slot;
- bitmap_iterator bi;
fprintf (stderr, "VAR: ");
print_generic_expr (stderr, db_p->var, dump_flags);
- fprintf (stderr, ", DEF_BLOCKS: { ");
- EXECUTE_IF_SET_IN_BITMAP (db_p->def_blocks, 0, i, bi)
- {
- fprintf (stderr, "%ld ", i);
- }
- fprintf (stderr, "}");
- fprintf (stderr, ", LIVEIN_BLOCKS: { ");
- EXECUTE_IF_SET_IN_BITMAP (db_p->livein_blocks, 0, i, bi)
- {
- fprintf (stderr, "%ld ", i);
- }
- fprintf (stderr, "}\n");
+ bitmap_print (stderr, db_p->def_blocks, ", DEF_BLOCKS: { ", "}");
+ bitmap_print (stderr, db_p->livein_blocks, ", LIVEIN_BLOCKS: { ", "}\n");
return 1;
}
static void
invalidate_name_tags (bitmap vars_to_rename)
{
- size_t i;
+ unsigned i;
bool rename_name_tags_p;
bitmap_iterator bi;
static tree_live_info_p
coalesce_ssa_name (var_map map, int flags)
{
- int num, x, i;
+ unsigned num, x, i;
sbitmap live;
tree var, phi;
root_var_p rv;
int p = var_to_partition (map, res);
if (p == NO_PARTITION)
continue;
- for (x = 0; x < PHI_NUM_ARGS (phi); x++)
+ for (x = 0; x < (unsigned)PHI_NUM_ARGS (phi); x++)
{
tree arg = PHI_ARG_DEF (phi, x);
int p2;
basic_block bb;
type_var_p tv;
tree var;
- int x, p, p2;
+ unsigned x, p, p2;
coalesce_list_p cl;
conflict_graph graph;
FOR_EACH_BB (bb)
{
tree phi, arg;
- int p;
+ unsigned p;
+
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
p = var_to_partition (map, PHI_RESULT (phi));
/* Skip virtual PHI nodes. */
- if (p == NO_PARTITION)
+ if (p == (unsigned)NO_PARTITION)
continue;
make_live_on_entry (liveinfo, bb, p);
/* Each argument is a potential copy operation. Add any arguments
which are not coalesced to the result to the coalesce list. */
- for (x = 0; x < PHI_NUM_ARGS (phi); x++)
+ for (x = 0; x < (unsigned)PHI_NUM_ARGS (phi); x++)
{
arg = PHI_ARG_DEF (phi, x);
if (!phi_ssa_name_p (arg))
continue;
p2 = var_to_partition (map, arg);
- if (p2 == NO_PARTITION)
+ if (p2 == (unsigned)NO_PARTITION)
continue;
if (p != p2)
add_coalesce (cl, p, p2, 1);
tree *ret = NULL;
#ifdef ENABLE_CHECKING
- int x;
+ unsigned x;
for (x = 0; x <= num_var_partitions (t->map); x++)
gcc_assert (!t->partition_dep_list[x]);
#endif
find_replaceable_exprs (var_map map)
{
basic_block bb;
- int i;
+ unsigned i;
temp_expr_table_p table;
tree *ret;
if (dump_file && (dump_flags & TDF_DETAILS))
{
- size_t i;
+ unsigned i;
fputs ("\nScan results:\n", dump_file);
EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
scalarize_parms (void)
{
tree list = NULL;
- size_t i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
/* If aliases have been computed before, clear existing information. */
if (aliases_computed_p)
{
- size_t i;
+ unsigned i;
bitmap_iterator bi;
/* Clear the call-clobbered set. We are going to re-discover
compute_points_to_and_addr_escape (struct alias_info *ai)
{
basic_block bb;
- size_t i;
+ unsigned i;
tree op;
ssa_op_iter iter;
for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
{
- size_t j;
+ unsigned j;
tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
static void
maybe_create_global_var (struct alias_info *ai)
{
- size_t i, n_clobbered;
+ unsigned i, n_clobbered;
bitmap_iterator bi;
/* No need to create it, if we have one already. */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
- int edge_number;
+ unsigned edge_number;
gcc_assert (bb != EXIT_BLOCK_PTR);
new_tree_live_info (var_map map)
{
tree_live_info_p live;
- int x;
+ unsigned x;
live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
live->map = map;
static void
live_worklist (tree_live_info_p live, varray_type stack, int i)
{
- int b;
+ unsigned b;
tree var;
basic_block def_bb = NULL;
edge e;
calculate_live_on_entry (var_map map)
{
tree_live_info_p live;
- int i;
+ unsigned i;
basic_block bb;
bitmap saw_def;
tree phi, var, stmt;
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ for (i = 0; i < (unsigned)PHI_NUM_ARGS (phi); i++)
{
var = PHI_ARG_DEF (phi, i);
if (!phi_ssa_name_p (var))
int entry_block = e->dest->index;
if (e->dest == EXIT_BLOCK_PTR)
continue;
- for (i = 0; i < num_var_partitions (map); i++)
+ for (i = 0; i < (unsigned)num_var_partitions (map); i++)
{
basic_block tmp;
tree d;
calculate_live_on_exit (tree_live_info_p liveinfo)
{
unsigned b;
- int i, x;
+ unsigned i, x;
bitmap *on_exit;
basic_block bb;
edge e;
var_map map = liveinfo->map;
on_exit = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
- for (x = 0; x < last_basic_block; x++)
+ for (x = 0; x < (unsigned)last_basic_block; x++)
on_exit[x] = BITMAP_XMALLOC ();
/* Set all the live-on-exit bits for uses in PHIs. */
FOR_EACH_BB (bb)
{
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ for (i = 0; i < (unsigned)PHI_NUM_ARGS (phi); i++)
{
t = PHI_ARG_DEF (phi, i);
e = PHI_ARG_EDGE (phi, i);
void
sort_coalesce_list (coalesce_list_p cl)
{
- int x, num, count;
+ unsigned x, num, count;
partition_pair_p chain, p;
partition_pair_p *list;
conflict_graph graph;
var_map map;
bitmap live;
- int x, y, i;
+ unsigned x, y, i;
basic_block bb;
varray_type partition_link, tpa_to_clear, tpa_nodes;
unsigned l;
EXECUTE_IF_SET_IN_BITMAP (live, 0, x, bi)
{
i = tpa_find_tree (tpa, x);
- if (i != TPA_NONE)
+ if (i != (unsigned)TPA_NONE)
{
int start = VARRAY_INT (tpa_nodes, i);
/* If start is 0, a new root reference list is being started.
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
basic_block bb;
- int i;
+ unsigned i;
var_map map = live->map;
bitmap_iterator bi;
extern void register_ssa_partition_check (tree ssa_var);
#endif
-static inline int num_var_partitions (var_map);
+static inline unsigned num_var_partitions (var_map);
static inline tree var_to_partition_to_var (var_map, tree);
static inline tree partition_to_var (var_map, int);
static inline int var_to_partition (var_map, tree);
/* Number of partitions in MAP. */
-static inline int
+static inline unsigned
num_var_partitions (var_map map)
{
return map->num_partitions;
add_exit_phis_var (tree var, bitmap livein, bitmap exits)
{
bitmap def;
- int index;
+ unsigned index;
basic_block def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (var));
bitmap_iterator bi;
for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
- size_t i;
+ unsigned i;
bitmap_iterator bi;
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
add_stmt_operand (&global_var, stmt, opf_is_def);
else
{
- size_t i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
add_stmt_operand (&global_var, stmt, opf_none);
else
{
- size_t i;
+ unsigned i;
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
fprintf (outfile, "%s[%d] := { ", setname, blockindex);
if (set)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
fprintf (outfile, " (");
print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
fprintf (outfile, ") ");
- if (bitmap_last_set_bit (set->expressions) != i)
+ if (bitmap_last_set_bit (set->expressions) != (int)i)
fprintf (outfile, ", ");
}
}
dom = get_immediate_dominator (CDI_DOMINATORS, block);
if (dom)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
bitmap_set_t newset = NEW_SETS (dom);