gcc/
* basic-block.h (BASIC_BLOCK): Eliminate macro.
* alias.c (init_alias_analysis): Eliminate BASIC_BLOCK macro in
favor of uses of BASIC_BLOCK_FOR_FN, making uses of cfun explicit.
* bt-load.c (compute_defs_uses_and_gen, compute_out, link_btr_uses,
block_at_edge_of_live_range_p, migrate_btr_defs): Likewise.
* caller-save.c (insert_one_insn): Likewise.
* cfg.c (debug_bb, get_bb_original, get_bb_copy): Likewise.
* cfgexpand.c (add_scope_conflicts): Likewise.
* cfghooks.c (verify_flow_info): Likewise.
* cfgloop.c (flow_loops_find): Likewise.
* cfgrtl.c (rtl_flow_call_edges_add): Likewise.
* config/mips/mips.c (r10k_insert_cache_barriers): Likewise.
* config/s390/s390.c (s390_optimize_nonescaping_tx): Likewise.
* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
* cse.c (cse_main): Likewise.
* dce.c (fast_dce): Likewise.
* df-core.c (df_set_blocks, df_worklist_propagate_forward,
df_worklist_propagate_backward, df_worklist_dataflow_doublequeue,
df_bb_replace, df_dump_region): Likewise.
* df-problems.c (df_rd_bb_local_compute, df_lr_bb_local_compute,
df_live_bb_local_compute, df_chain_remove_problem)
df_chain_create_bb, df_word_lr_bb_local_compute, df_note_bb_compute,
df_md_bb_local_compute, df_md_local_compute,
df_md_transfer_function): Likewise.
* df-scan.c (df_scan_blocks, df_reorganize_refs_by_reg_by_insn,
df_reorganize_refs_by_insn, df_bb_refs_collect,
df_record_entry_block_defs, df_update_entry_block_defs,
df_record_exit_block_uses): Likewise.
* dominance.c (nearest_common_dominator_for_set): Likewise.
* gcse.c (hoist_code): Likewise.
* graph.c (draw_cfg_nodes_no_loops): Likewise.
* ipa-inline-analysis.c (param_change_prob,
estimate_function_body_sizes): Likewise.
* ipa-split.c (dominated_by_forbidden): Likewise.
* loop-unroll.c (apply_opt_in_copies): Likewise.
* lower-subreg.c (decompose_multiword_subregs): Likewise.
* lra-lives.c (lra_create_live_ranges): Likewise.
* predict.c (propagate_freq): Likewise.
* regrename.c (regrename_analyze): Likewise.
* regstat.c (regstat_bb_compute_ri,
regstat_bb_compute_calls_crossed): Likewise.
* resource.c (mark_target_live_regs): Likewise.
* sched-ebb.c (ebb_fix_recovery_cfg): Likewise.
* sched-int.h (EBB_FIRST_BB, EBB_LAST_BB): Likewise.
* sched-rgn.c (debug_region, dump_region_dot, too_large,
haifa_find_rgns, extend_rgns, compute_dom_prob_ps, update_live,
propagate_deps, sched_is_disabled_for_current_region_p): Likewise.
* sched-vis.c (debug_bb_n_slim): Likewise.
* sel-sched-ir.c (sel_finish_global_and_expr, verify_backedges,
purge_empty_blocks, sel_remove_loop_preheader): Likewise.
* sel-sched.c (remove_insns_that_need_bookkeeping)
(current_region_empty_p, sel_region_init,
simplify_changed_insns): Likewise.
* trans-mem.c (execute_tm_mark, execute_tm_edges,
tm_memopt_compute_antic, ipa_tm_scan_irr_function): Likewise.
* tree-cfg.c (make_edges, end_recording_case_labels,
label_to_block_fn, gimple_debug_bb, gimple_flow_call_edges_add,
remove_edge_and_dominated_blocks,
gimple_purge_all_dead_eh_edges,
gimple_purge_all_dead_abnormal_call_edges): Likewise.
* tree-cfgcleanup.c (fixup_noreturn_call,
split_bbs_on_noreturn_calls, cleanup_tree_cfg_1): Likewise.
* tree-inline.c (copy_cfg_body, fold_marked_statements): Likewise.
* tree-into-ssa.c (set_livein_block, prune_unused_phi_nodes,
insert_phi_nodes_for, insert_updated_phi_nodes_for): Likewise.
* tree-ssa-dom.c (tree_ssa_dominator_optimize): Likewise.
* tree-ssa-live.c (live_worklist): Likewise.
* tree-ssa-loop-manip.c (compute_live_loop_exits,
add_exit_phis_var, find_uses_to_rename, copy_phi_node_args): Likewise.
* tree-ssa-pre.c (compute_antic): Likewise.
* tree-ssa-reassoc.c (update_range_test, optimize_range_tests): Likewise.
* tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
* tree-ssa-tail-merge.c (same_succ_hash, same_succ_def::equal,
same_succ_flush_bbs, update_worklist, set_cluster,
same_phi_alternatives, find_clusters_1, apply_clusters,
update_debug_stmts): Likewise.
* tree-ssa-threadupdate.c (mark_threaded_blocks,
thread_through_all_blocks): Likewise.
* tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
* tree-vrp.c (find_assert_locations): Likewise.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205822 138bc75d-0d04-0410-961f-82ee72b054a4
+2013-12-09 David Malcolm <dmalcolm@redhat.com>
+
+ * basic-block.h (BASIC_BLOCK): Eliminate macro.
+
+ * alias.c (init_alias_analysis): Eliminate BASIC_BLOCK macro in
+ favor of uses of BASIC_BLOCK_FOR_FN, making uses of cfun explicit.
+ * bt-load.c (compute_defs_uses_and_gen, compute_out, link_btr_uses,
+ block_at_edge_of_live_range_p, migrate_btr_defs): Likewise.
+ * caller-save.c (insert_one_insn): Likewise.
+ * cfg.c (debug_bb, get_bb_original, get_bb_copy): Likewise.
+ * cfgexpand.c (add_scope_conflicts): Likewise.
+ * cfghooks.c (verify_flow_info): Likewise.
+ * cfgloop.c (flow_loops_find): Likewise.
+ * cfgrtl.c (rtl_flow_call_edges_add): Likewise.
+ * config/mips/mips.c (r10k_insert_cache_barriers): Likewise.
+ * config/s390/s390.c (s390_optimize_nonescaping_tx): Likewise.
+ * config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
+ * cse.c (cse_main): Likewise.
+ * dce.c (fast_dce): Likewise.
+ * df-core.c (df_set_blocks, df_worklist_propagate_forward,
+ df_worklist_propagate_backward, df_worklist_dataflow_doublequeue,
+ df_bb_replace, df_dump_region): Likewise.
+ * df-problems.c (df_rd_bb_local_compute, df_lr_bb_local_compute,
+ df_live_bb_local_compute, df_chain_remove_problem)
+ df_chain_create_bb, df_word_lr_bb_local_compute, df_note_bb_compute,
+ df_md_bb_local_compute, df_md_local_compute,
+ df_md_transfer_function): Likewise.
+ * df-scan.c (df_scan_blocks, df_reorganize_refs_by_reg_by_insn,
+ df_reorganize_refs_by_insn, df_bb_refs_collect,
+ df_record_entry_block_defs, df_update_entry_block_defs,
+ df_record_exit_block_uses): Likewise.
+ * dominance.c (nearest_common_dominator_for_set): Likewise.
+ * gcse.c (hoist_code): Likewise.
+ * graph.c (draw_cfg_nodes_no_loops): Likewise.
+ * ipa-inline-analysis.c (param_change_prob,
+ estimate_function_body_sizes): Likewise.
+ * ipa-split.c (dominated_by_forbidden): Likewise.
+ * loop-unroll.c (apply_opt_in_copies): Likewise.
+ * lower-subreg.c (decompose_multiword_subregs): Likewise.
+ * lra-lives.c (lra_create_live_ranges): Likewise.
+ * predict.c (propagate_freq): Likewise.
+ * regrename.c (regrename_analyze): Likewise.
+ * regstat.c (regstat_bb_compute_ri,
+ regstat_bb_compute_calls_crossed): Likewise.
+ * resource.c (mark_target_live_regs): Likewise.
+ * sched-ebb.c (ebb_fix_recovery_cfg): Likewise.
+ * sched-int.h (EBB_FIRST_BB, EBB_LAST_BB): Likewise.
+ * sched-rgn.c (debug_region, dump_region_dot, too_large,
+ haifa_find_rgns, extend_rgns, compute_dom_prob_ps, update_live,
+ propagate_deps, sched_is_disabled_for_current_region_p): Likewise.
+ * sched-vis.c (debug_bb_n_slim): Likewise.
+ * sel-sched-ir.c (sel_finish_global_and_expr, verify_backedges,
+ purge_empty_blocks, sel_remove_loop_preheader): Likewise.
+ * sel-sched.c (remove_insns_that_need_bookkeeping)
+ (current_region_empty_p, sel_region_init,
+ simplify_changed_insns): Likewise.
+ * trans-mem.c (execute_tm_mark, execute_tm_edges,
+ tm_memopt_compute_antic, ipa_tm_scan_irr_function): Likewise.
+ * tree-cfg.c (make_edges, end_recording_case_labels,
+ label_to_block_fn, gimple_debug_bb, gimple_flow_call_edges_add,
+ remove_edge_and_dominated_blocks, remove_edge_and_dominated_blocks,
+ gimple_purge_all_dead_eh_edges,
+ gimple_purge_all_dead_abnormal_call_edges): Likewise.
+ * tree-cfgcleanup.c (fixup_noreturn_call,
+ split_bbs_on_noreturn_calls, cleanup_tree_cfg_1): Likewise.
+ * tree-inline.c (copy_cfg_body, fold_marked_statements): Likewise.
+ * tree-into-ssa.c (set_livein_block, prune_unused_phi_nodes,
+ insert_phi_nodes_for, insert_updated_phi_nodes_for): Likewise.
+ * tree-ssa-dom.c (tree_ssa_dominator_optimize): Likewise.
+ * tree-ssa-live.c (live_worklist): Likewise.
+ * tree-ssa-loop-manip.c (compute_live_loop_exits,
+ add_exit_phis_var, find_uses_to_rename, copy_phi_node_args): Likewise.
+ * tree-ssa-pre.c (compute_antic): Likewise.
+ * tree-ssa-reassoc.c (update_range_test, optimize_range_tests): Likewise.
+ * tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
+ * tree-ssa-tail-merge.c (same_succ_hash, same_succ_def::equal,
+ same_succ_flush_bbs, update_worklist, set_cluster,
+ same_phi_alternatives, find_clusters_1, apply_clusters,
+ update_debug_stmts): Likewise.
+ * tree-ssa-threadupdate.c (mark_threaded_blocks,
+ thread_through_all_blocks): Likewise.
+ * tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
+ * tree-vrp.c (find_assert_locations): Likewise.
+
2013-12-09 David Malcolm <dmalcolm@redhat.com>
* basic-block.h (SET_BASIC_BLOCK): Eliminate macro.
/* Walk the insns adding values to the new_reg_base_value array. */
for (i = 0; i < rpo_cnt; i++)
{
- basic_block bb = BASIC_BLOCK (rpo[i]);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
FOR_BB_INSNS (bb, insn)
{
if (NONDEBUG_INSN_P (insn))
#define label_to_block_map (cfun->cfg->x_label_to_block_map)
#define profile_status (cfun->cfg->x_profile_status)
-#define BASIC_BLOCK(N) ((*basic_block_info)[(N)])
-
/* For iterating over basic blocks. */
#define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
for (BB = FROM; BB != TO; BB = BB->DIR)
bitmap_vector_clear (bb_gen, last_basic_block);
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
int reg;
btr_def defs_this_bb = NULL;
rtx insn;
changed = 0;
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
- bitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK (i));
+ bitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK_FOR_FN (cfun, i));
changed |= bitmap_ior_and_compl (bb_out[i], bb_gen[i],
bb_in, bb_kill[i]);
}
Count up the number of reaching defs of each use. */
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
rtx insn;
rtx last;
- bitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK (i));
+ bitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK_FOR_FN (cfun, i));
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
insn != last;
insn = NEXT_INSN (insn))
static int
block_at_edge_of_live_range_p (int bb, btr_def def)
{
- if (def->other_btr_uses_before_def && BASIC_BLOCK (bb) == def->bb)
+ if (def->other_btr_uses_before_def
+ && BASIC_BLOCK_FOR_FN (cfun, bb) == def->bb)
return 1;
else if (def->other_btr_uses_after_use)
{
btr_user user;
for (user = def->uses; user != NULL; user = user->next)
- if (BASIC_BLOCK (bb) == user->bb)
+ if (BASIC_BLOCK_FOR_FN (cfun, bb) == user->bb)
return 1;
}
return 0;
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
fprintf (dump_file,
"Basic block %d: count = " HOST_WIDEST_INT_PRINT_DEC
" loop-depth = %d idom = %d\n",
&new_chain->live_throughout);
CLEAR_REG_SET (&new_chain->dead_or_set);
- if (chain->insn == BB_HEAD (BASIC_BLOCK (chain->block)))
- BB_HEAD (BASIC_BLOCK (chain->block)) = new_chain->insn;
+ if (chain->insn == BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, chain->block)))
+ BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, chain->block)) = new_chain->insn;
}
else
{
note_stores (PATTERN (chain->insn), add_stored_regs,
&new_chain->live_throughout);
CLEAR_REG_SET (&new_chain->dead_or_set);
- if (chain->insn == BB_END (BASIC_BLOCK (chain->block)))
- BB_END (BASIC_BLOCK (chain->block)) = new_chain->insn;
+ if (chain->insn == BB_END (BASIC_BLOCK_FOR_FN (cfun, chain->block)))
+ BB_END (BASIC_BLOCK_FOR_FN (cfun, chain->block)) = new_chain->insn;
}
new_chain->block = chain->block;
new_chain->is_caller_save_insn = 1;
DEBUG_FUNCTION basic_block
debug_bb_n (int n)
{
- basic_block bb = BASIC_BLOCK (n);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, n);
debug_bb (bb);
return bb;
}
key.index1 = bb->index;
entry = bb_original.find (&key);
if (entry)
- return BASIC_BLOCK (entry->index2);
+ return BASIC_BLOCK_FOR_FN (cfun, entry->index2);
else
return NULL;
}
key.index1 = bb->index;
entry = bb_copy.find (&key);
if (entry)
- return BASIC_BLOCK (entry->index2);
+ return BASIC_BLOCK_FOR_FN (cfun, entry->index2);
else
return NULL;
}
for (i = 0; i < n_bbs; i++)
{
bitmap active;
- bb = BASIC_BLOCK (rpo[i]);
+ bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
active = (bitmap)bb->aux;
add_scope_conflicts_1 (bb, work, false);
if (bitmap_ior_into (active, work))
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
{
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
- && bb != BASIC_BLOCK (bb->index))
+ && bb != BASIC_BLOCK_FOR_FN (cfun, bb->index))
{
error ("bb %d on wrong place", bb->index);
err = 1;
auto_vec<loop_p> larray (loops->larray->length ());
for (b = 0; b < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; b++)
{
- basic_block header = BASIC_BLOCK (rc_order[b]);
+ basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
if (bb_loop_header_p (header))
{
struct loop *loop;
for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
rtx insn;
rtx prev_insn;
n = pre_and_rev_post_order_compute (NULL, rev_post_order, false);
for (i = 0; i < n; i++)
{
- bb = BASIC_BLOCK (rev_post_order[i]);
+ bb = BASIC_BLOCK_FOR_FN (cfun, rev_post_order[i]);
/* If this block is only reached by unconditional edges, and if the
source of every edge is protected, the beginning of the block is
for (bb_index = 0; bb_index < n_basic_blocks_for_fn (cfun); bb_index++)
{
- bb = BASIC_BLOCK (bb_index);
+ bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (!bb)
continue;
for (i = n_basic_blocks_for_fn (cfun) - 1; i >= 0; i--)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
branch = 0;
if (spu_bb_info[i].prop_jump)
{
processed before. */
do
{
- bb = BASIC_BLOCK (rc_order[i++]);
+ bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
}
while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
&& i < n_blocks);
for (i = 0; i < n_blocks; i++)
{
int index = postorder[i];
- basic_block bb = BASIC_BLOCK (index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
bool local_changed;
if (index < NUM_FIXED_BLOCKS)
EXECUTE_IF_SET_IN_BITMAP (&diff, 0, bb_index, bi)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (bb)
{
void *bb_info = df_get_bb_info (dflow, bb_index);
{
edge e;
edge_iterator ei;
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
bool changed = !age;
/* Calculate <conf_op> of incoming edges. */
{
edge e;
edge_iterator ei;
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
bool changed = !age;
/* Calculate <conf_op> of incoming edges. */
bitmap_clear_bit (pending, index);
bb_index = blocks_in_postorder[index];
- bb = BASIC_BLOCK (bb_index);
+ bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
prev_age = last_visit_age[index];
if (dir == DF_FORWARD)
changed = df_worklist_propagate_forward (dataflow, bb_index,
bitmap_clear (worklist);
}
for (i = 0; i < n_blocks; i++)
- BASIC_BLOCK (blocks_in_postorder[i])->aux = NULL;
+ BASIC_BLOCK_FOR_FN (cfun, blocks_in_postorder[i])->aux = NULL;
BITMAP_FREE (worklist);
BITMAP_FREE (pending);
fprintf (dump_file, "shoving block %d into %d\n", new_block_index, old_index);
gcc_assert (df);
- gcc_assert (BASIC_BLOCK (old_index) == NULL);
+ gcc_assert (BASIC_BLOCK_FOR_FN (cfun, old_index) == NULL);
for (p = 0; p < df->num_problems_defined; p++)
{
df_clear_bb_dirty (new_block);
SET_BASIC_BLOCK_FOR_FN (cfun, old_index, new_block);
new_block->index = old_index;
- df_set_bb_dirty (BASIC_BLOCK (old_index));
+ df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, old_index));
SET_BASIC_BLOCK_FOR_FN (cfun, new_block_index, NULL);
}
void
df_bb_delete (int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
int i;
if (!df)
EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
dump_bb (file, bb, 0, TDF_DETAILS);
}
fprintf (file, "\n");
static void
df_rd_bb_local_compute (unsigned int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
static void
df_lr_bb_local_compute (unsigned int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx insn;
df_ref *def_rec;
static void
df_live_bb_local_compute (unsigned int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
df_ref *def_rec;
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
static void
df_chain_create_bb (unsigned int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
bitmap_head cpy;
static void
df_word_lr_bb_local_compute (unsigned int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
rtx insn;
df_ref *def_rec;
df_note_bb_compute (unsigned int bb_index,
bitmap live, bitmap do_not_gen, bitmap artificial_uses)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
static void
df_md_bb_local_compute (unsigned int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
rtx insn;
bitmap kill = &df_md_get_bb_info (bb_index)->kill;
EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
{
- basic_block bb = BASIC_BLOCK (df_bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, df_bb_index);
if (bitmap_bit_p (all_blocks, df_bb_index))
bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
df_get_live_in (bb));
static bool
df_md_transfer_function (int bb_index)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
bitmap in = &bb_info->in;
bitmap out = &bb_info->out;
df_record_entry_block_defs (df->entry_block_defs);
df_get_exit_block_use_set (df->exit_block_uses);
df_record_exit_block_uses (df->exit_block_uses);
- df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
- df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
+ df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
+ df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
/* Regular blocks */
FOR_EACH_BB (bb)
EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *ref_rec;
EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *ref_rec;
EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
{
- offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
+ offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
+ index),
+ offset, ref_info,
include_defs, include_uses,
include_eq_uses);
}
void
df_bb_refs_record (int bb_index, bool scan_insns)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
int luid = 0;
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
/* Process bb_refs chain */
- df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL,
+ df_refs_add_to_chains (&collection_rec,
+ BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
+ NULL,
copy_defs);
}
{
df_record_entry_block_defs (&refs);
bitmap_copy (df->entry_block_defs, &refs);
- df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
+ df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
}
bitmap_clear (&refs);
}
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
/* Process bb_refs chain */
- df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL,
+ df_refs_add_to_chains (&collection_rec,
+ BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
+ NULL,
copy_uses);
}
{
df_record_exit_block_uses (&refs);
bitmap_copy (df->exit_block_uses,& refs);
- df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
+ df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
}
bitmap_clear (&refs);
}
basic_block dom;
first = bitmap_first_set_bit (blocks);
- dom = BASIC_BLOCK (first);
+ dom = BASIC_BLOCK_FOR_FN (cfun, first);
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
- if (dom != BASIC_BLOCK (i))
- dom = nearest_common_dominator (dir, dom, BASIC_BLOCK (i));
+ if (dom != BASIC_BLOCK_FOR_FN (cfun, i))
+ dom = nearest_common_dominator (dir, dom, BASIC_BLOCK_FOR_FN (cfun, i));
return dom;
}
data->max_reg_pressure[pressure_class] += nregs;
EXECUTE_IF_SET_IN_BITMAP (hoisted_bbs, 0, k, bi)
{
- data = BB_DATA (BASIC_BLOCK (k));
+ data = BB_DATA (BASIC_BLOCK_FOR_FN (cfun, k));
data->max_reg_pressure[pressure_class] += nregs;
}
}
hoisted. */
EXECUTE_IF_SET_IN_BITMAP (hoisted_bbs, 0, k, bi)
{
- data = BB_DATA (BASIC_BLOCK (k));
+ data = BB_DATA (BASIC_BLOCK_FOR_FN (cfun, k));
bitmap_copy (data->live_in, data->backup);
data->max_reg_pressure[pressure_class]
= data->old_pressure;
for (i = n_basic_blocks_for_fn (fun) - n;
i < n_basic_blocks_for_fn (fun); i++)
{
- basic_block bb = BASIC_BLOCK (rpo[i]);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
draw_cfg_node (pp, fun->funcdef_no, bb);
bitmap_set_bit (visited, bb->index);
}
max = 1;
EXECUTE_IF_SET_IN_BITMAP (info.bb_set, 0, index, bi)
- max = MIN (max, BASIC_BLOCK (index)->frequency);
+ max = MIN (max, BASIC_BLOCK_FOR_FN (cfun, index)->frequency);
BITMAP_FREE (info.bb_set);
if (max < bb->frequency)
nblocks = pre_and_rev_post_order_compute (NULL, order, false);
for (n = 0; n < nblocks; n++)
{
- bb = BASIC_BLOCK (order[n]);
+ bb = BASIC_BLOCK_FOR_FN (cfun, order[n]);
freq = compute_call_stmt_bb_frequency (node->decl, bb);
/* TODO: Obviously predicates can be propagated down across CFG. */
EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
{
- if (dominated_by_p (CDI_DOMINATORS, bb, BASIC_BLOCK (dom_bb)))
+ if (dominated_by_p (CDI_DOMINATORS, bb,
+ BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
return true;
}
for (i = opt_info->first_new_block; i < (unsigned) last_basic_block; i++)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
orig_bb = get_bb_original (bb);
/* bb->aux holds position in copy sequence initialized by
get_bb_copy (get_bb_original (bb)) == bb. */
for (i = opt_info->first_new_block; i < (unsigned) last_basic_block; i++)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
orig_bb = get_bb_original (bb);
if (get_bb_copy (orig_bb) != bb)
continue;
rtx insn, end;
edge fallthru;
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
insn = BB_HEAD (bb);
end = BB_END (bb);
lra_assert (n_blocks_inverted == n_basic_blocks_for_fn (cfun));
for (i = n_blocks_inverted - 1; i >= 0; --i)
{
- bb = BASIC_BLOCK (post_order_rev_cfg[i]);
+ bb = BASIC_BLOCK_FOR_FN (cfun, post_order_rev_cfg[i]);
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun))
continue;
edge_iterator ei;
int count = 0;
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
FOR_EACH_EDGE (e, ei, bb->preds)
{
for (i = 0; i < n_bbs; i++)
{
- basic_block bb1 = BASIC_BLOCK (inverse_postorder[i]);
+ basic_block bb1 = BASIC_BLOCK_FOR_FN (cfun, inverse_postorder[i]);
struct bb_rename_info *this_info;
bool success;
edge e;
bitmap local_live, bitmap local_processed,
int *local_live_last_luid)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
static void
regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
{
- basic_block bb = BASIC_BLOCK (bb_index);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
information, we can get it from there unless the insn at the
start of the basic block has been deleted. */
if (tinfo && tinfo->block != -1
- && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
+ && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK_FOR_FN (cfun,
+ tinfo->block))))
b = tinfo->block;
}
to use the LR problem. Otherwise, we must assume everything is live. */
if (b != -1)
{
- regset regs_live = DF_LR_IN (BASIC_BLOCK (b));
+ regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
rtx start_insn, stop_insn;
/* Compute hard regs live at start of block. */
/* Get starting and ending insn, handling the case where each might
be a SEQUENCE. */
start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
- insns : BB_HEAD (BASIC_BLOCK (b)));
+ insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
stop_insn = target;
if (NONJUMP_INSN_P (start_insn)
gcc_assert (last_bb->index != bbi);
if (jump_bb_nexti == last_bb->index)
- last_bb = BASIC_BLOCK (jump_bbi);
+ last_bb = BASIC_BLOCK_FOR_FN (cfun, jump_bbi);
}
#endif /* INSN_SCHEDULING */
/* The mapping from ebb to block. */
extern int *ebb_head;
#define BB_TO_BLOCK(ebb) (rgn_bb_table[ebb_head[ebb]])
-#define EBB_FIRST_BB(ebb) BASIC_BLOCK (BB_TO_BLOCK (ebb))
-#define EBB_LAST_BB(ebb) BASIC_BLOCK (rgn_bb_table[ebb_head[ebb + 1] - 1])
+#define EBB_FIRST_BB(ebb) BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (ebb))
+#define EBB_LAST_BB(ebb) \
+ BASIC_BLOCK_FOR_FN (cfun, rgn_bb_table[ebb_head[ebb + 1] - 1])
#define INSN_BB(INSN) (BLOCK_TO_BB (BLOCK_NUM (INSN)))
extern int current_nr_blocks;
for (bb = 0; bb < rgn_table[rgn].rgn_nr_blocks; bb++)
{
- dump_bb (stderr, BASIC_BLOCK (rgn_bb_table[current_blocks + bb]),
+ dump_bb (stderr,
+ BASIC_BLOCK_FOR_FN (cfun, rgn_bb_table[current_blocks + bb]),
0, TDF_SLIM | TDF_BLOCKS);
fprintf (stderr, "\n");
}
edge e;
edge_iterator ei;
int src_bb_num = rgn_bb_table[current_blocks + i];
- basic_block bb = BASIC_BLOCK (src_bb_num);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, src_bb_num);
FOR_EACH_EDGE (e, ei, bb->succs)
if (bb_in_region_p (e->dest->index, rgn))
{
(*num_bbs)++;
(*num_insns) += (common_sched_info->estimate_number_of_insns
- (BASIC_BLOCK (block)));
+ (BASIC_BLOCK_FOR_FN (cfun, block)));
return ((*num_bbs > PARAM_VALUE (PARAM_MAX_SCHED_REGION_BLOCKS))
|| (*num_insns > PARAM_VALUE (PARAM_MAX_SCHED_REGION_INSNS)));
edge e;
child = queue[++head];
- FOR_EACH_EDGE (e, ei, BASIC_BLOCK (child)->preds)
+ FOR_EACH_EDGE (e, ei,
+ BASIC_BLOCK_FOR_FN (cfun, child)->preds)
{
node = e->src->index;
CONTAINING_RGN (child) = nr_regions;
queue[head] = queue[tail--];
- FOR_EACH_EDGE (e, ei, BASIC_BLOCK (child)->succs)
+ FOR_EACH_EDGE (e, ei,
+ BASIC_BLOCK_FOR_FN (cfun,
+ child)->succs)
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
--degree[e->dest->index];
}
{
int hdr = -1;
- FOR_EACH_EDGE (e, ei, BASIC_BLOCK (bbn)->preds)
+ FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (cfun, bbn)->preds)
{
int predn = e->src->index;
CONTAINING_RGN (bbn) = nr_regions;
BLOCK_TO_BB (bbn) = 0;
- FOR_EACH_EDGE (e, ei, BASIC_BLOCK (bbn)->succs)
+ FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (cfun, bbn)->succs)
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
degree[e->dest->index]--;
idx++;
- FOR_EACH_EDGE (e, ei, BASIC_BLOCK (succn)->succs)
+ FOR_EACH_EDGE (e, ei,
+ BASIC_BLOCK_FOR_FN (cfun, succn)->succs)
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
degree[e->dest->index]--;
}
/* Initialize dom[bb] to '111..1'. */
bitmap_ones (dom[bb]);
- FOR_EACH_EDGE (in_edge, in_ei, BASIC_BLOCK (BB_TO_BLOCK (bb))->preds)
+ FOR_EACH_EDGE (in_edge, in_ei,
+ BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (bb))->preds)
{
int pred_bb;
edge out_edge;
(bb_from == bb_to \
|| IS_RGN_ENTRY (bb_from) \
|| (bitmap_bit_p (ancestor_edges[bb_to], \
- EDGE_TO_BIT (single_pred_edge (BASIC_BLOCK (BB_TO_BLOCK (bb_from)))))))
+ EDGE_TO_BIT (single_pred_edge (BASIC_BLOCK_FOR_FN (cfun, \
+ BB_TO_BLOCK (bb_from)))))))
/* Turns on the fed_by_spec_load flag for insns fed by load_insn. */
static void
propagate_deps (int bb, struct deps_desc *pred_deps)
{
- basic_block block = BASIC_BLOCK (BB_TO_BLOCK (bb));
+ basic_block block = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (bb));
edge_iterator ei;
edge e;
int bb;
for (bb = 0; bb < current_nr_blocks; bb++)
- if (!(BASIC_BLOCK (BB_TO_BLOCK (bb))->flags & BB_DISABLE_SCHEDULE))
+ if (!(BASIC_BLOCK_FOR_FN (cfun,
+ BB_TO_BLOCK (bb))->flags & BB_DISABLE_SCHEDULE))
return false;
return true;
DEBUG_FUNCTION void
debug_bb_n_slim (int n)
{
- basic_block bb = BASIC_BLOCK (n);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, n);
debug_bb_slim (bb);
}
bbs.create (current_nr_blocks);
for (i = 0; i < current_nr_blocks; i++)
- bbs.quick_push (BASIC_BLOCK (BB_TO_BLOCK (i)));
+ bbs.quick_push (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i)));
/* Clear AV_SETs and INSN_EXPRs. */
{
edge_iterator ei;
for (i = 0; i < current_nr_blocks; i++)
- FOR_EACH_EDGE (e, ei, BASIC_BLOCK (BB_TO_BLOCK (i))->succs)
+ FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i))->succs)
if (in_current_region_p (e->dest)
&& BLOCK_TO_BB (e->dest->index) < i)
n++;
/* Do not attempt to delete the first basic block in the region. */
for (i = 1; i < current_nr_blocks; )
{
- basic_block b = BASIC_BLOCK (BB_TO_BLOCK (i));
+ basic_block b = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i));
if (maybe_tidy_empty_bb (b))
continue;
/* Add blocks that aren't within the current loop to PREHEADER_BLOCKS. */
for (i = 0; i < RGN_NR_BLOCKS (cur_rgn); i++)
{
- bb = BASIC_BLOCK (BB_TO_BLOCK (i));
+ bb = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i));
/* If the basic block belongs to region, but doesn't belong to
corresponding loop, then it should be a preheader. */
&& (EXPR_SPEC (expr)
|| !EXPR_ORIG_BB_INDEX (expr)
|| !dominated_by_p (CDI_DOMINATORS,
- BASIC_BLOCK (EXPR_ORIG_BB_INDEX (expr)),
+ BASIC_BLOCK_FOR_FN (cfun,
+ EXPR_ORIG_BB_INDEX (expr)),
BLOCK_FOR_INSN (FENCE_INSN (fence)))))
{
if (sched_verbose >= 4)
{
int i;
for (i = 0; i < current_nr_blocks; i++)
- if (! sel_bb_empty_p (BASIC_BLOCK (BB_TO_BLOCK (i))))
+ if (! sel_bb_empty_p (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i))))
return false;
return true;
bbs.create (current_nr_blocks);
for (i = 0; i < current_nr_blocks; i++)
- bbs.quick_push (BASIC_BLOCK (BB_TO_BLOCK (i)));
+ bbs.quick_push (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i)));
sel_init_bbs (bbs);
compute_live for the first insn of the loop. */
if (current_loop_nest)
{
- int header = (sel_is_loop_preheader_p (BASIC_BLOCK (BB_TO_BLOCK (0)))
- ? 1
- : 0);
+ int header =
+ (sel_is_loop_preheader_p (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (0)))
+ ? 1
+ : 0);
if (current_nr_blocks == header + 1)
update_liveness_on_insn
- (sel_bb_head (BASIC_BLOCK (BB_TO_BLOCK (header))));
+ (sel_bb_head (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (header))));
}
/* Set hooks so that no newly generated insn will go out unnoticed. */
for (i = 0; i < current_nr_blocks; i++)
{
- basic_block bb = BASIC_BLOCK (BB_TO_BLOCK (i));
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i));
rtx insn;
FOR_BB_INSNS (bb, insn)
&& sub & GTMA_MAY_ENTER_IRREVOCABLE)
continue;
}
- expand_block_tm (r, BASIC_BLOCK (i));
+ expand_block_tm (r, BASIC_BLOCK_FOR_FN (cfun, i));
}
}
FOR_EACH_VEC_ELT (bb_regions, i, r)
if (r != NULL)
- expand_block_edges (r, BASIC_BLOCK (i));
+ expand_block_edges (r, BASIC_BLOCK_FOR_FN (cfun, i));
bb_regions.release ();
unsigned int i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (region->exit_blocks, 0, i, bi)
- BB_VISITED_P (BASIC_BLOCK (i)) = true;
+ BB_VISITED_P (BASIC_BLOCK_FOR_FN (cfun, i)) = true;
}
qin = worklist;
unsigned i;
EXECUTE_IF_SET_IN_BITMAP (new_irr, 0, i, bmi)
- ipa_tm_decrement_clone_counts (BASIC_BLOCK (i), for_clone);
+ ipa_tm_decrement_clone_counts (BASIC_BLOCK_FOR_FN (cfun, i),
+ for_clone);
if (old_irr)
{
/* Create an edge from entry to the first block with executable
statements in it. */
- make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
+ make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
+ BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
EDGE_FALLTHRU);
/* Traverse the basic block array placing edges. */
edge_to_cases = NULL;
EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (bb)
{
gimple stmt = last_stmt (bb);
and undefined variable warnings quite right. */
if (seen_error () && uid < 0)
{
- gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
+ gimple_stmt_iterator gsi =
+ gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
gimple stmt;
stmt = gimple_build_label (dest);
basic_block
gimple_debug_bb_n (int n)
{
- gimple_debug_bb (BASIC_BLOCK (n));
- return BASIC_BLOCK (n);
+ gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
+ return BASIC_BLOCK_FOR_FN (cfun, n);
}
return or not... */
for (i = 0; i < last_bb; i++)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
gimple_stmt_iterator gsi;
gimple stmt, last_stmt;
EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
bitmap_set_bit (df_idom,
get_immediate_dominator (CDI_DOMINATORS, bb)->index);
}
the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
for (dbb = first_dom_son (CDI_DOMINATORS, bb);
dbb;
dbb = next_dom_son (CDI_DOMINATORS, dbb))
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
/* Earlier gimple_purge_dead_eh_edges could have removed
this basic block already. */
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
/* Earlier gimple_purge_dead_abnormal_call_edges could have removed
this basic block already. */
SET_USE (use_p, error_mark_node);
}
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
- delete_basic_block (BASIC_BLOCK (bb_index));
+ delete_basic_block (BASIC_BLOCK_FOR_FN (cfun, bb_index));
BITMAP_FREE (blocks);
release_ssa_name (op);
}
if (bb == NULL
|| bb->index < NUM_FIXED_BLOCKS
|| bb->index >= last_basic_block
- || BASIC_BLOCK (bb->index) != bb
+ || BASIC_BLOCK_FOR_FN (cfun, bb->index) != bb
|| !gimple_call_noreturn_p (stmt))
continue;
n = last_basic_block;
for (i = NUM_FIXED_BLOCKS; i < n; i++)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (bb)
retval |= cleanup_tree_cfg_bb (bb);
}
if (i < NUM_FIXED_BLOCKS)
continue;
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (!bb)
continue;
for (; last < last_basic_block; last++)
{
if (need_debug_cleanup)
- maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
- BASIC_BLOCK (last)->aux = NULL;
+ maybe_move_debug_stmts_to_successors (id,
+ BASIC_BLOCK_FOR_FN (cfun, last));
+ BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
/* Update call edge destinations. This can not be done before loop
info is updated, because we may split basic blocks. */
if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
- redirect_all_calls (id, BASIC_BLOCK (last));
+ redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
}
entry_block_map->aux = NULL;
exit_block_map->aux = NULL;
fold_marked_statements (int first, struct pointer_set_t *statements)
{
for (; first < n_basic_blocks_for_fn (cfun); first++)
- if (BASIC_BLOCK (first))
+ if (BASIC_BLOCK_FOR_FN (cfun, first))
{
gimple_stmt_iterator gsi;
- for (gsi = gsi_start_bb (BASIC_BLOCK (first));
+ for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
!gsi_end_p (gsi);
gsi_next (&gsi))
if (pointer_set_contains (statements, gsi_stmt (gsi)))
break;
}
if (gsi_end_p (i2))
- i2 = gsi_start_bb (BASIC_BLOCK (first));
+ i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
else
gsi_next (&i2);
while (1)
is moot anyway. */
if (maybe_clean_or_replace_eh_stmt (old_stmt,
new_stmt))
- gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
+ gimple_purge_dead_eh_edges (
+ BASIC_BLOCK_FOR_FN (cfun, first));
break;
}
gsi_next (&i2);
new_stmt);
if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
- gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
+ gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
+ first));
}
}
}
if (def_block_index == -1
|| ! dominated_by_p (CDI_DOMINATORS, bb,
- BASIC_BLOCK (def_block_index)))
+ BASIC_BLOCK_FOR_FN (cfun, def_block_index)))
info->need_phi_state = NEED_PHI_STATE_MAYBE;
}
else
adef = 1;
EXECUTE_IF_SET_IN_BITMAP (to_remove, 0, i, bi)
{
- def_bb = BASIC_BLOCK (i);
+ def_bb = BASIC_BLOCK_FOR_FN (cfun, i);
defs[adef].bb_index = i;
defs[adef].dfs_num = bb_dom_dfs_in (CDI_DOMINATORS, def_bb);
defs[adef + 1].bb_index = i;
p = b;
else
{
- use_bb = get_immediate_dominator (CDI_DOMINATORS, BASIC_BLOCK (b));
+ use_bb = get_immediate_dominator (CDI_DOMINATORS,
+ BASIC_BLOCK_FOR_FN (cfun, b));
p = find_dfsnum_interval (defs, n_defs,
bb_dom_dfs_in (CDI_DOMINATORS, use_bb));
if (!bitmap_bit_p (phis, p))
continue;
/* Add the new uses to the worklist. */
- def_bb = BASIC_BLOCK (p);
+ def_bb = BASIC_BLOCK_FOR_FN (cfun, p);
FOR_EACH_EDGE (e, ei, def_bb->preds)
{
u = e->src->index;
/* And insert the PHI nodes. */
EXECUTE_IF_SET_IN_BITMAP (phi_insertion_points, 0, bb_index, bi)
{
- bb = BASIC_BLOCK (bb_index);
+ bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (update_p)
mark_block_for_update (bb);
db->def_blocks);
if (entry != ENTRY_BLOCK_PTR_FOR_FN (cfun))
EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
- if (BASIC_BLOCK (i) != entry
- && dominated_by_p (CDI_DOMINATORS, BASIC_BLOCK (i), entry))
+ if (BASIC_BLOCK_FOR_FN (cfun, i) != entry
+ && dominated_by_p (CDI_DOMINATORS,
+ BASIC_BLOCK_FOR_FN (cfun, i), entry))
bitmap_set_bit (pruned_idf, i);
}
else
{
edge e;
edge_iterator ei;
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src->index >= 0)
iterator. */
EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (bb == NULL)
continue;
while (single_succ_p (bb)
while (live->stack_top != live->work_stack)
{
b = *--(live->stack_top);
- loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
+ loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited, tmp);
}
BITMAP_FREE (tmp);
EXECUTE_IF_SET_IN_BITMAP (use_blocks, 0, i, bi)
{
- basic_block use_bb = BASIC_BLOCK (i);
+ basic_block use_bb = BASIC_BLOCK_FOR_FN (cfun, i);
struct loop *use_loop = use_bb->loop_father;
gcc_checking_assert (def_loop != use_loop
&& ! flow_loop_nested_p (def_loop, use_loop));
EXECUTE_IF_SET_IN_BITMAP (live_exits, 0, index, bi)
{
- add_exit_phi (BASIC_BLOCK (index), var);
+ add_exit_phi (BASIC_BLOCK_FOR_FN (cfun, index), var);
}
BITMAP_FREE (live_exits);
if (changed_bbs)
EXECUTE_IF_SET_IN_BITMAP (changed_bbs, 0, index, bi)
- find_uses_to_rename_bb (BASIC_BLOCK (index), use_blocks, need_phis);
+ find_uses_to_rename_bb (BASIC_BLOCK_FOR_FN (cfun, index), use_blocks, need_phis);
else
FOR_EACH_BB (bb)
find_uses_to_rename_bb (bb, use_blocks, need_phis);
unsigned i;
for (i = first_new_block; i < (unsigned) last_basic_block; i++)
- BASIC_BLOCK (i)->flags |= BB_DUPLICATED;
+ BASIC_BLOCK_FOR_FN (cfun, i)->flags |= BB_DUPLICATED;
for (i = first_new_block; i < (unsigned) last_basic_block; i++)
- add_phi_args_after_copy_bb (BASIC_BLOCK (i));
+ add_phi_args_after_copy_bb (BASIC_BLOCK_FOR_FN (cfun, i));
for (i = first_new_block; i < (unsigned) last_basic_block; i++)
- BASIC_BLOCK (i)->flags &= ~BB_DUPLICATED;
+ BASIC_BLOCK_FOR_FN (cfun, i)->flags &= ~BB_DUPLICATED;
}
{
if (bitmap_bit_p (changed_blocks, postorder[i]))
{
- basic_block block = BASIC_BLOCK (postorder[i]);
+ basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
changed |= compute_antic_aux (block,
bitmap_bit_p (has_abnormal_preds,
block->index));
{
if (bitmap_bit_p (changed_blocks, postorder[i]))
{
- basic_block block = BASIC_BLOCK (postorder[i]);
+ basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
changed
|= compute_partial_antic_aux (block,
bitmap_bit_p (has_abnormal_preds,
{
operand_entry_t oe = (*ops)[range->idx];
tree op = oe->op;
- gimple stmt = op ? SSA_NAME_DEF_STMT (op) : last_stmt (BASIC_BLOCK (oe->id));
+ gimple stmt = op ? SSA_NAME_DEF_STMT (op) :
+ last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
location_t loc = gimple_location (stmt);
tree optype = op ? TREE_TYPE (op) : boolean_type_node;
tree tem = build_range_check (loc, optype, exp, in_p, low, high);
oe = (*ops)[i];
ranges[i].idx = i;
init_range_entry (ranges + i, oe->op,
- oe->op ? NULL : last_stmt (BASIC_BLOCK (oe->id)));
+ oe->op ? NULL :
+ last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id)));
/* For | invert it now, we will invert it again before emitting
the optimized expression. */
if (opcode == BIT_IOR_EXPR
bitmap_set_bit (blocks, useblock->index);
}
}
- commondom = BASIC_BLOCK (bitmap_first_set_bit (blocks));
+ commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (blocks));
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
- BASIC_BLOCK (j));
+ BASIC_BLOCK_FOR_FN (cfun, j));
BITMAP_FREE (blocks);
return commondom;
}
int flags;
unsigned int i;
unsigned int first = bitmap_first_set_bit (e->bbs);
- basic_block bb = BASIC_BLOCK (first);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, first);
int size = 0;
gimple_stmt_iterator gsi;
gimple stmt;
EXECUTE_IF_SET_IN_BITMAP (e->succs, 0, s, bs)
{
- int n = find_edge (bb, BASIC_BLOCK (s))->dest_idx;
- for (gsi = gsi_start_phis (BASIC_BLOCK (s)); !gsi_end_p (gsi);
+ int n = find_edge (bb, BASIC_BLOCK_FOR_FN (cfun, s))->dest_idx;
+ for (gsi = gsi_start_phis (BASIC_BLOCK_FOR_FN (cfun, s)); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
first1 = bitmap_first_set_bit (e1->bbs);
first2 = bitmap_first_set_bit (e2->bbs);
- bb1 = BASIC_BLOCK (first1);
- bb2 = BASIC_BLOCK (first2);
+ bb1 = BASIC_BLOCK_FOR_FN (cfun, first1);
+ bb2 = BASIC_BLOCK_FOR_FN (cfun, first2);
if (BB_SIZE (bb1) != BB_SIZE (bb2))
return 0;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (bbs, 0, i, bi)
- same_succ_flush_bb (BASIC_BLOCK (i));
+ same_succ_flush_bb (BASIC_BLOCK_FOR_FN (cfun, i));
}
/* Release the last vdef in BB, either normal or phi result. */
same = same_succ_alloc ();
EXECUTE_IF_SET_IN_BITMAP (deleted_bb_preds, 0, i, bi)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
gcc_assert (bb != NULL);
find_same_succ_bb (bb, &same);
if (same == NULL)
merge = BB_CLUSTER (bb1);
merge_clusters (merge, old);
EXECUTE_IF_SET_IN_BITMAP (old->bbs, 0, i, bi)
- BB_CLUSTER (BASIC_BLOCK (i)) = merge;
+ BB_CLUSTER (BASIC_BLOCK_FOR_FN (cfun, i)) = merge;
all_clusters[old->index] = NULL;
update_rep_bb (merge, old->rep_bb);
delete_cluster (old);
EXECUTE_IF_SET_IN_BITMAP (same_succ->succs, 0, s, bs)
{
- succ = BASIC_BLOCK (s);
+ succ = BASIC_BLOCK_FOR_FN (cfun, s);
e1 = find_edge (bb1, succ);
e2 = find_edge (bb2, succ);
if (e1->flags & EDGE_COMPLEX
EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, 0, i, bi)
{
- bb1 = BASIC_BLOCK (i);
+ bb1 = BASIC_BLOCK_FOR_FN (cfun, i);
/* TODO: handle blocks with phi-nodes. We'll have to find corresponding
phi-nodes in bb1 and bb2, with the same alternatives for the same
nr_comparisons = 0;
EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, i + 1, j, bj)
{
- bb2 = BASIC_BLOCK (j);
+ bb2 = BASIC_BLOCK_FOR_FN (cfun, j);
if (bb_has_non_vop_phi (bb2))
continue;
bitmap_clear_bit (c->bbs, bb2->index);
EXECUTE_IF_SET_IN_BITMAP (c->bbs, 0, j, bj)
{
- bb1 = BASIC_BLOCK (j);
+ bb1 = BASIC_BLOCK_FOR_FN (cfun, j);
bitmap_clear_bit (update_bbs, bb1->index);
replace_block_by (bb1, bb2);
gimple stmt;
gimple_stmt_iterator gsi;
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
stmt = gsi_stmt (gsi);
{
EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (EDGE_COUNT (bb->preds) > 1
&& !redirection_block_p (bb))
{
by trimming off the end of the jump thread path. */
EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (e->aux)
we have to iterate on those rather than the threaded_edges vector. */
EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
{
- bb = BASIC_BLOCK (i);
+ bb = BASIC_BLOCK_FOR_FN (cfun, i);
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (e->aux)
loop structure. */
EXECUTE_IF_SET_IN_BITMAP (threaded_blocks, 0, i, bi)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (EDGE_COUNT (bb->preds) > 0)
retval |= thread_block (bb, true);
equivalency = XNEW (struct edge_equivalency);
equivalency->rhs = x;
equivalency->lhs = cond;
- find_edge (bb, BASIC_BLOCK (i))->aux = equivalency;
+ find_edge (bb, BASIC_BLOCK_FOR_FN (cfun, i))->aux =
+ equivalency;
}
}
free (info);
need_asserts = false;
for (i = rpo_cnt - 1; i >= 0; --i)
{
- basic_block bb = BASIC_BLOCK (rpo[i]);
+ basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
edge e;
edge_iterator ei;