+2014-08-21 David Malcolm <dmalcolm@redhat.com>
+
+ * cfgrtl.c (can_delete_note_p): Require a const rtx_note * rather
+ than a const_rtx.
+ (can_delete_label_p): Require a const rtx_code_label * rather than
+ a const_rtx.
+ (delete_insn): Add checked cast to rtx_code_label * when we know
+ we're dealing with LABEL_P (insn). Strengthen local "bb_note" from
+ rtx to rtx_insn *.
+ (delete_insn_chain): Strengthen locals "prev" and "current" from
+ rtx to rtx_insn *. Add a checked cast when assigning from
+ "finish" (strengthening the params will come later). Add a
+ checked cast to rtx_note * in region where we know
+ NOTE_P (current).
+ (rtl_delete_block): Strengthen locals "insn" and "end" from rtx to
+ rtx_insn *.
+ (compute_bb_for_insn): Likewise.
+ (free_bb_for_insn): Likewise for local "insn".
+ (update_bb_for_insn_chain): Strengthen params "begin", "end" and
+ local "insn" from rtx to rtx_insn *.
+ (flow_active_insn_p): Require a const rtx_insn * rather than a
+ const_rtx.
+ (contains_no_active_insn_p): Strengthen local "insn" from rtx to
+ rtx_insn *.
+ (can_fallthru): Likewise for locals "insn" and "insn2".
+ (bb_note): Likewise for local "note".
+ (first_insn_after_basic_block_note): Likewise for local "insn" and
+ for return type.
+ (rtl_split_block): Likewise for locals "insn" and "next".
+ (unique_locus_on_edge_between_p): Likewise for locals "insn" and
+ "end".
+ (rtl_merge_blocks): Likewise for locals "b_head", "b_end",
+ "a_end", "del_first", "del_last", "b_debug_start", "b_debug_end",
+ "prev", "tmp".
+ (try_redirect_by_replacing_jump): Likewise for locals "insn" (both of
+ them), "kill_from", "barrier", "new_insn".
+ (patch_jump_insn): Likewise for params "insn", "old_label".
+ (redirect_branch_edge): Likewise for locals "old_label", "insn".
+ (force_nonfallthru_and_redirect): Likewise for locals "insn",
+ "old_label", "new_label".
+ (rtl_tidy_fallthru_edge): Likewise for local "q".
+ (rtl_split_edge): Likewise for locals "before", "last".
+ (commit_one_edge_insertion): Likewise for locals "before",
+ "after", "insns", "tmp", "last", adding a checked cast where
+ currently necessary.
+ (commit_edge_insertions): Likewise.
+ (rtl_dump_bb): Likewise for locals "insn", "last".
+ (print_rtl_with_bb): Likewise for local "x".
+ (rtl_verify_bb_insns): Likewise for local "x".
+ (rtl_verify_bb_pointers): Likewise for local "insn".
+ (rtl_verify_bb_insn_chain): Likewise for locals "x", "last_head",
+ "head", "end".
+ (rtl_verify_fallthru): Likewise for local "insn".
+ (rtl_verify_bb_layout): Likewise for locals "x" and "rtx_first".
+ (purge_dead_edges): Likewise for local "insn".
+ (fixup_abnormal_edges): Likewise for locals "insn", "stop", "next".
+ (skip_insns_after_block): Likewise for return type and for locals
+ "insn", "last_insn", "next_head", "prev".
+ (record_effective_endpoints): Likewise for locals "next_insn",
+ "insn", "end".
+ (fixup_reorder_chain): Likewise for locals "bb_end_insn" and "end".
+ (verify_insn_chain): Likewise for locals "x", "prevx", "nextx".
+ (cfg_layout_can_duplicate_bb_p): Likewise for local "insn".
+ (duplicate_insn_chain): For now, add checked cast from rtx to
+ rtx_insn * when returning insn.
+ (cfg_layout_duplicate_bb): Likewise for local "insn".
+ (cfg_layout_delete_block): Likewise for locals "insn", "next",
+ "prev", "remaints".
+ (cfg_layout_merge_blocks): Likewise for locals "insn", "last".
+ (rtl_block_empty_p): Likewise.
+ (rtl_split_block_before_cond_jump): Likewise for locals "insn",
+ "split_point", "last".
+ (rtl_block_ends_with_call_p): Likewise for local "insn".
+ (need_fake_edge_p): Strengthen param "insn" from const_rtx to
+ const rtx_insn *.
+ (rtl_flow_call_edges_add): Strengthen locals "insn", "prev_insn",
+ "split_at_insn" from rtx to rtx_insn *.
+ (rtl_lv_add_condition_to_bb): Likewise for locals "seq", "jump".
+ (rtl_can_remove_branch_p): Strengthen local "insn" from const_rtx
+ to const rtx_insn *.
+ (rtl_account_profile_record): Strengthen local "insn" from rtx to
+ rtx_insn *.
+
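The recurring idiom in the entry above is GCC's checked-cast machinery from is-a.h: as_a <T> (x) performs a downcast and asserts that x really has the requested dynamic type, while safe_as_a <T> (x) additionally lets a NULL pointer pass through unchanged. Below is a minimal, self-contained sketch of that pattern; the toy hierarchy and helper names (node, insn_node, note_node, is_note, as_a_note, safe_as_a_note) are invented purely for illustration and are not GCC's actual implementation.

/* Toy model of the checked-cast idiom; not taken from GCC sources.  */
#include <cassert>
#include <cstddef>

enum node_kind { KIND_INSN, KIND_NOTE };

struct node      { node_kind kind; };   /* plays the role of rtx_def  */
struct insn_node : node {};             /* plays the role of rtx_insn */
struct note_node : insn_node {};        /* plays the role of rtx_note */

/* Predicate, analogous to NOTE_P.  */
static inline bool
is_note (const node *p)
{
  return p->kind == KIND_NOTE;
}

/* Checked downcast, analogous to as_a <rtx_note *> (x): aborts if the
   dynamic type is not the one the caller claimed.  */
static inline note_node *
as_a_note (node *p)
{
  assert (is_note (p));
  return static_cast <note_node *> (p);
}

/* NULL-tolerant variant, analogous to safe_as_a <rtx_insn *> (x).  */
static inline note_node *
safe_as_a_note (node *p)
{
  return p ? as_a_note (p) : NULL;
}

int
main ()
{
  note_node n;
  n.kind = KIND_NOTE;
  node *weak = &n;                       /* weak static type, like plain rtx */
  note_node *strong = as_a_note (weak);  /* recover the stronger type */
  (void) strong;
  (void) safe_as_a_note (NULL);          /* NULL passes straight through */
  return 0;
}

In the patch itself such casts appear only where the surrounding code has already established the dynamic type (under LABEL_P (insn), NOTE_P (current), and so on), so the assertion mostly documents an invariant the code already guarantees while letting the locals carry the stronger rtx_insn * or rtx_note * static type.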
2014-08-21 David Malcolm <dmalcolm@redhat.com>
* cfgloopanal.c (num_loop_insns): Strengthen local "insn" from
static GTY(()) rtx cfg_layout_function_footer;
static GTY(()) rtx cfg_layout_function_header;
-static rtx skip_insns_after_block (basic_block);
+static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);
void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
-static int can_delete_note_p (const_rtx);
-static int can_delete_label_p (const_rtx);
+static int can_delete_note_p (const rtx_note *);
+static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
so that we may simply delete it. */
static int
-can_delete_note_p (const_rtx note)
+can_delete_note_p (const rtx_note *note)
{
switch (NOTE_KIND (note))
{
/* True if a given label can be deleted. */
static int
-can_delete_label_p (const_rtx label)
+can_delete_label_p (const rtx_code_label *label)
{
return (!LABEL_PRESERVE_P (label)
/* User declared labels must be preserved. */
/* Some labels can't be directly removed from the INSN chain, as they
might be references via variables, constant pool etc.
Convert them to the special NOTE_INSN_DELETED_LABEL note. */
- if (! can_delete_label_p (insn))
+ if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
{
const char *name = LABEL_NAME (insn);
basic_block bb = BLOCK_FOR_INSN (insn);
- rtx bb_note = NEXT_INSN (insn);
+ rtx_insn *bb_note = NEXT_INSN (insn);
really_delete = false;
PUT_CODE (insn, NOTE);
void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
- rtx prev, current;
+ rtx_insn *prev, *current;
/* Unchain the insns one by one. It would be quicker to delete all of these
with a single unchaining, rather than one at a time, but we need to keep
the NOTE's. */
- current = finish;
+ current = safe_as_a <rtx_insn *> (finish);
while (1)
{
prev = PREV_INSN (current);
- if (NOTE_P (current) && !can_delete_note_p (current))
+ if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
;
else
delete_insn (current);
static void
rtl_delete_block (basic_block b)
{
- rtx insn, end;
+ rtx_insn *insn, *end;
/* If the head of this block is a CODE_LABEL, then it might be the
label for an exception handler which can't be reached. We need
FOR_EACH_BB_FN (bb, cfun)
{
- rtx end = BB_END (bb);
- rtx insn;
+ rtx_insn *end = BB_END (bb);
+ rtx_insn *insn;
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
unsigned int
free_bb_for_insn (void)
{
- rtx insn;
+ rtx_insn *insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (!BARRIER_P (insn))
BLOCK_FOR_INSN (insn) = NULL;
(i.e. both BEGIN and END will be updated. */
static void
-update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
+update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
end = NEXT_INSN (end);
for (insn = begin; insn != end; insn = NEXT_INSN (insn))
even after reload. */
static bool
-flow_active_insn_p (const_rtx insn)
+flow_active_insn_p (const rtx_insn *insn)
{
if (active_insn_p (insn))
return true;
bool
contains_no_active_insn_p (const_basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|| !single_succ_p (bb))
bool
can_fallthru (basic_block src, basic_block target)
{
- rtx insn = BB_END (src);
- rtx insn2;
+ rtx_insn *insn = BB_END (src);
+ rtx_insn *insn2;
edge e;
edge_iterator ei;
rtx_note *
bb_note (basic_block bb)
{
- rtx note;
+ rtx_insn *note;
note = BB_HEAD (bb);
if (LABEL_P (note))
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
note associated with the BLOCK. */
-static rtx
+static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
- rtx insn;
+ rtx_insn *insn;
/* Get the first instruction in the block. */
insn = BB_HEAD (block);
if (insn == NULL_RTX)
- return NULL_RTX;
+ return NULL;
if (LABEL_P (insn))
insn = NEXT_INSN (insn);
gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
rtl_split_block (basic_block bb, void *insnp)
{
basic_block new_bb;
- rtx insn = (rtx) insnp;
+ rtx_insn *insn = (rtx_insn *) insnp;
edge e;
edge_iterator ei;
if (insn)
{
- rtx next = insn;
+ rtx_insn *next = insn;
insn = PREV_INSN (insn);
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
- rtx insn, end;
+ rtx_insn *insn, *end;
if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
return false;
static void
rtl_merge_blocks (basic_block a, basic_block b)
{
- rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
- rtx del_first = NULL_RTX, del_last = NULL_RTX;
- rtx b_debug_start = b_end, b_debug_end = b_end;
+ rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
+ rtx_insn *del_first = NULL, *del_last = NULL;
+ rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
int b_empty = 0;
/* If there was a jump out of A, delete it. */
if (JUMP_P (a_end))
{
- rtx prev;
+ rtx_insn *prev;
for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
if (!NOTE_P (prev)
the insn that set cc0. */
if (only_sets_cc0_p (prev))
{
- rtx tmp = prev;
+ rtx_insn *tmp = prev;
prev = prev_nonnote_insn (prev);
if (!prev)
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
basic_block src = e->src;
- rtx insn = BB_END (src), kill_from;
+ rtx_insn *insn = BB_END (src), *kill_from;
rtx set;
int fallthru = 0;
/* Selectively unlink whole insn chain. */
if (in_cfglayout)
{
- rtx insn = BB_FOOTER (src);
+ rtx_insn *insn = BB_FOOTER (src);
delete_insn_chain (kill_from, BB_END (src), false);
else
{
rtx target_label = block_label (target);
- rtx barrier, label;
+ rtx_insn *barrier;
+ rtx label;
rtx_jump_table_data *table;
emit_jump_insn_after_noloc (gen_jump (target_label), insn);
/* Move the jump before barrier so that the notes
which originally were or were created before jump table are
inside the basic block. */
- rtx new_insn = BB_END (src);
+ rtx_insn *new_insn = BB_END (src);
update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
PREV_INSN (barrier), src);
doesn't work. */
static bool
-patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
+patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
rtx_jump_table_data *table;
rtx tmp;
static edge
redirect_branch_edge (edge e, basic_block target)
{
- rtx old_label = BB_HEAD (e->dest);
+ rtx_insn *old_label = BB_HEAD (e->dest);
basic_block src = e->src;
- rtx insn = BB_END (src);
+ rtx_insn *insn = BB_END (src);
/* We can only redirect non-fallthru edges of jump insn. */
if (e->flags & EDGE_FALLTHRU)
}
if (adjust_jump_target)
{
- rtx insn = BB_END (e->src), note;
- rtx old_label = BB_HEAD (e->dest);
- rtx new_label = BB_HEAD (target);
+ rtx_insn *insn = BB_END (e->src);
+ rtx note;
+ rtx_insn *old_label = BB_HEAD (e->dest);
+ rtx_insn *new_label = BB_HEAD (target);
if (JUMP_LABEL (insn) == old_label)
{
static void
rtl_tidy_fallthru_edge (edge e)
{
- rtx q;
+ rtx_insn *q;
basic_block b = e->src, c = b->next_bb;
/* ??? In a late-running flow pass, other folks may have deleted basic
rtl_split_edge (edge edge_in)
{
basic_block bb, new_bb;
- rtx before;
+ rtx_insn *before;
/* Abnormal edges cannot be split. */
gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
before = BB_HEAD (edge_in->dest);
else
- before = NULL_RTX;
+ before = NULL;
/* If this is a fall through edge to the exit block, the blocks might be
not adjacent, and the right place is after the source. */
/* For asm goto even splitting of fallthru edge might
need insn patching, as other labels might point to the
old label. */
- rtx last = BB_END (edge_in->src);
+ rtx_insn *last = BB_END (edge_in->src);
if (last
&& JUMP_P (last)
&& edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
void
commit_one_edge_insertion (edge e)
{
- rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
+ rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
basic_block bb;
/* Pull the insns off the edge now since the edge might go away. */
- insns = e->insns.r;
+ insns = safe_as_a <rtx_insn *> (e->insns.r);
e->insns.r = NULL_RTX;
/* Figure out where to put these insns. If the destination has
static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
{
- rtx insn;
- rtx last;
+ rtx_insn *insn;
+ rtx_insn *last;
char *s_indent;
s_indent = (char *) alloca ((size_t) indent + 1);
{
FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
- rtx x;
+ rtx_insn *x;
start[INSN_UID (BB_HEAD (bb))] = bb;
end[INSN_UID (BB_END (bb))] = bb;
static int
rtl_verify_bb_insns (void)
{
- rtx x;
+ rtx_insn *x;
int err = 0;
basic_block bb;
/* Check the general integrity of the basic blocks. */
FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
- rtx insn;
+ rtx_insn *insn;
if (!(bb->flags & BB_RTL))
{
{
basic_block bb;
int err = 0;
- rtx x;
- rtx last_head = get_last_insn ();
+ rtx_insn *x;
+ rtx_insn *last_head = get_last_insn ();
basic_block *bb_info;
const int max_uid = get_max_uid ();
FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
- rtx head = BB_HEAD (bb);
- rtx end = BB_END (bb);
+ rtx_insn *head = BB_HEAD (bb);
+ rtx_insn *end = BB_END (bb);
for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
{
e = find_fallthru_edge (bb->succs);
if (!e)
{
- rtx insn;
+ rtx_insn *insn;
/* Ensure existence of barrier in BB with no fallthru edges. */
for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
- rtx insn;
+ rtx_insn *insn;
if (e->src->next_bb != e->dest)
{
{
basic_block bb;
int err = 0;
- rtx x;
+ rtx_insn *x;
int num_bb_notes;
- const rtx rtx_first = get_insns ();
+ rtx_insn * const rtx_first = get_insns ();
basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
num_bb_notes = 0;
purge_dead_edges (basic_block bb)
{
edge e;
- rtx insn = BB_END (bb), note;
+ rtx_insn *insn = BB_END (bb);
+ rtx note;
bool purged = false;
bool found;
edge_iterator ei;
if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
{
- rtx insn;
+ rtx_insn *insn;
/* Get past the new insns generated. Allow notes, as the insns
may be already deleted. */
if (CALL_P (insn) || can_throw_internal (insn))
{
- rtx stop, next;
+ rtx_insn *stop, *next;
e = find_fallthru_edge (bb->succs);
associated with BB (e.g., barriers). If there are any such insns,
we return the last one. Otherwise, we return the end of BB. */
-static rtx
+static rtx_insn *
skip_insns_after_block (basic_block bb)
{
- rtx insn, last_insn, next_head, prev;
+ rtx_insn *insn, *last_insn, *next_head, *prev;
- next_head = NULL_RTX;
+ next_head = NULL;
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
next_head = BB_HEAD (bb->next_bb);
static void
record_effective_endpoints (void)
{
- rtx next_insn;
+ rtx_insn *next_insn;
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
for (insn = get_insns ();
insn
next_insn = get_insns ();
FOR_EACH_BB_FN (bb, cfun)
{
- rtx end;
+ rtx_insn *end;
if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
SET_BB_HEADER (bb) = unlink_insn_chain (next_insn,
bb->aux)
{
edge e_fall, e_taken, e;
- rtx bb_end_insn;
+ rtx_insn *bb_end_insn;
rtx ret_label = NULL_RTX;
basic_block nb;
edge_iterator ei;
edge e2;
edge_iterator ei2;
basic_block dest, nb;
- rtx end;
+ rtx_insn *end;
insn = BB_END (e->src);
end = PREV_INSN (BB_HEAD (e->src));
DEBUG_FUNCTION void
verify_insn_chain (void)
{
- rtx x, prevx, nextx;
+ rtx_insn *x, *prevx, *nextx;
int insn_cnt1, insn_cnt2;
for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
/* Do not duplicate blocks containing insns that can't be copied. */
if (targetm.cannot_copy_insn_p)
{
- rtx insn = BB_HEAD (bb);
+ rtx_insn *insn = BB_HEAD (bb);
while (1)
{
if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
basic_block new_bb;
insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
static void
cfg_layout_delete_block (basic_block bb)
{
- rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
+ rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
+ rtx *to;
if (BB_HEADER (bb))
{
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
- rtx insn;
+ rtx_insn *insn;
gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
SET_BB_FOOTER (a) = SET_BB_FOOTER (b);
else
{
- rtx last = BB_FOOTER (a);
+ rtx_insn *last = BB_FOOTER (a);
while (NEXT_INSN (last))
last = NEXT_INSN (last);
SET_BB_FOOTER (a) = BB_HEADER (b);
else
{
- rtx last = BB_HEADER (b);
+ rtx_insn *last = BB_HEADER (b);
while (NEXT_INSN (last))
last = NEXT_INSN (last);
static bool
rtl_block_empty_p (basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
static basic_block
rtl_split_block_before_cond_jump (basic_block bb)
{
- rtx insn;
- rtx split_point = NULL;
- rtx last = NULL;
+ rtx_insn *insn;
+ rtx_insn *split_point = NULL;
+ rtx_insn *last = NULL;
bool found_code = false;
FOR_BB_INSNS (bb, insn)
static bool
rtl_block_ends_with_call_p (basic_block bb)
{
- rtx insn = BB_END (bb);
+ rtx_insn *insn = BB_END (bb);
while (!CALL_P (insn)
&& insn != BB_HEAD (bb)
Helper function for rtl_flow_call_edges_add. */
static bool
-need_fake_edge_p (const_rtx insn)
+need_fake_edge_p (const rtx_insn *insn)
{
if (!INSN_P (insn))
return false;
if (check_last_block)
{
basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
- rtx insn = BB_END (bb);
+ rtx_insn *insn = BB_END (bb);
/* Back up past insns that must be kept in the same block as a call. */
while (insn != BB_HEAD (bb)
for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
- rtx insn;
- rtx prev_insn;
+ rtx_insn *insn;
+ rtx_insn *prev_insn;
if (!bb)
continue;
if (need_fake_edge_p (insn))
{
edge e;
- rtx split_at_insn = insn;
+ rtx_insn *split_at_insn = insn;
/* Don't split the block between a call and an insn that should
remain in the same block as the call. */
basic_block second_head ATTRIBUTE_UNUSED,
basic_block cond_bb, void *comp_rtx)
{
- rtx label, seq, jump;
+ rtx label;
+ rtx_insn *seq, *jump;
rtx op0 = XEXP ((rtx)comp_rtx, 0);
rtx op1 = XEXP ((rtx)comp_rtx, 1);
enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
{
const_basic_block src = e->src;
const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
- const_rtx insn = BB_END (src), set;
+ const rtx_insn *insn = BB_END (src);
+ rtx set;
/* The conditions are taken from try_redirect_by_replacing_jump. */
if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
rtl_account_profile_record (basic_block bb, int after_pass,
struct profile_record *record)
{
- rtx insn;
+ rtx_insn *insn;
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
{