+2014-08-22 David Malcolm <dmalcolm@redhat.com>
+
+ * sel-sched-ir.c (vinsn_copy): Strengthen local "copy" from rtx to
+ rtx_insn *.
+ (speculate_expr): Likewise for locals "orig_insn_rtx",
+ "spec_insn_rtx".
+ (eq_transformed_insns): Likewise for locals "i1", "i2".
+ (check_for_new_jump): Likewise for return type and local "end".
+ (find_new_jump): Likewise for return type and local "jump".
+ (sel_split_edge): Likewise for local "jump".
+ (sel_create_recovery_block): Likewise.
+ (sel_redirect_edge_and_branch_force): Likewise.
+ (sel_redirect_edge_and_branch): Likewise.
+
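As background for the rtx -> rtx_insn * strengthening described in the entry above, here is a minimal, self-contained C++ sketch.  It is not GCC code: only the rtx_def / rtx_insn / rtx names mirror the real hierarchy, everything else is simplified and hypothetical.  It shows what the conversion buys: once a local is declared rtx_insn * rather than rtx, insn-only helpers can demand the stronger pointer type, so passing a non-insn rtx becomes a compile-time error instead of a latent run-time bug.

/* Minimal sketch, not GCC code: a derived "insn" type lets insn-only
   helpers require the stronger pointer type.  */
#include <cassert>

struct rtx_def            { int code; };   /* any RTL object (simplified)  */
struct rtx_insn : rtx_def { int uid;  };   /* instructions only            */

typedef rtx_def *rtx;                      /* the "weak" pointer type      */

/* Hypothetical insn-only accessor: taking rtx_insn * instead of rtx means
   callers must prove they really hold an insn.  */
static int
insn_uid (const rtx_insn *insn)
{
  return insn->uid;
}

int
main ()
{
  rtx_insn insn_obj;
  insn_obj.code = 1;            /* hypothetical RTL code                   */
  insn_obj.uid = 42;

  rtx_insn *copy = &insn_obj;   /* strengthened local, as in vinsn_copy    */
  rtx weak = copy;              /* derived-to-base conversion stays implicit */

  assert (insn_uid (copy) == 42);
  /* insn_uid (weak);   -- would not compile: rtx is too weak a type here. */
  return weak->code == 1 ? 0 : 1;
}

Going the other direction in the real tree needs an explicit checked cast (as_a <rtx_insn *> from is-a.h); strengthening locals and return types as in this patch is what lets such casts be dropped at these call sites.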
2014-08-22 David Malcolm <dmalcolm@redhat.com>

 * sel-sched.c (substitute_reg_in_expr): Strengthen local
vinsn_t
vinsn_copy (vinsn_t vi, bool reattach_p)
{
- rtx copy;
+ rtx_insn *copy;
bool unique = VINSN_UNIQUE_P (vi);
vinsn_t new_vi;
speculate_expr (expr_t expr, ds_t ds)
{
int res;
- rtx orig_insn_rtx;
+ rtx_insn *orig_insn_rtx;
rtx spec_pat;
ds_t target_ds, current_ds;
case 1:
{
- rtx spec_insn_rtx = create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
+ rtx_insn *spec_insn_rtx
+   = create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
vinsn_t spec_vinsn = create_vinsn_from_insn_rtx (spec_insn_rtx, false);
change_vinsn_in_expr (expr, spec_vinsn);
static int
eq_transformed_insns (const void *p, const void *q)
{
- rtx i1 = VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
- rtx i2 = VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);
+ rtx_insn *i1
+   = VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
+ rtx_insn *i2
+   = VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);
if (INSN_UID (i1) == INSN_UID (i2))
return 1;
/* If BB ends with a jump insn whose ID is bigger than PREV_MAX_UID, return it.
   Otherwise returns NULL.  */
-static rtx
+static rtx_insn *
check_for_new_jump (basic_block bb, int prev_max_uid)
{
- rtx end;
+ rtx_insn *end;
end = sel_bb_end (bb);
if (end && INSN_UID (end) >= prev_max_uid)
/* Look for a new jump either in FROM_BB block or in newly created JUMP_BB block.
New means having UID at least equal to PREV_MAX_UID. */
-static rtx
+static rtx_insn *
find_new_jump (basic_block from, basic_block jump_bb, int prev_max_uid)
{
- rtx jump;
+ rtx_insn *jump;
/* Return immediately if no new insns were emitted. */
if (get_max_uid () == prev_max_uid)
{
basic_block new_bb, src, other_bb = NULL;
int prev_max_uid;
- rtx jump;
+ rtx_insn *jump;
src = e->src;
prev_max_uid = get_max_uid ();
{
basic_block first_bb, second_bb, recovery_block;
basic_block before_recovery = NULL;
- rtx jump;
+ rtx_insn *jump;
first_bb = BLOCK_FOR_INSN (orig_insn);
if (sel_bb_end_p (orig_insn))
{
basic_block jump_bb, src, orig_dest = e->dest;
int prev_max_uid;
- rtx jump;
+ rtx_insn *jump;
int old_seqno = -1;
/* This function is now used only for bookkeeping code creation, where
bool latch_edge_p;
basic_block src, orig_dest = e->dest;
int prev_max_uid;
- rtx jump;
+ rtx_insn *jump;
edge redirected;
bool recompute_toporder_p = false;
bool maybe_unreachable = single_pred_p (orig_dest);