-/* A subroutine of the atomic operation splitters.  Jump to LABEL if
-   COND is true.  Mark the jump as unlikely to be taken.  */
-static void
-emit_unlikely_jump (rtx cond, rtx label)
+/* A subroutine of the atomic operation splitters.  Jump to LABEL if
+   COND is true.  Mark the jump as unlikely to be taken, and return
+   the emitted jump insn.  */
+rtx
+alpha_emit_unlikely_jump (rtx cond, rtx label)
{
  rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
  rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
  add_reg_br_prob_note (insn, profile_probability::very_unlikely ());
+  return insn;
}
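The excerpt does not show the caller that consumes the new return value, so the following is only an illustrative sketch of the kind of use it enables: a splitter can now keep a handle on the branch that closes its retry loop.  It assumes x, cond, mem, mode, scratch and label are already set up as in the surrounding splitter code; retry_jump is a hypothetical local, not something taken from the patch.

  /* Illustrative only, not part of the patch.  Emit the store-conditional
     test and keep the (unlikely) retry branch around for later use.  */
  emit_insn (gen_store_conditional (mode, cond, mem, scratch));
  x = gen_rtx_EQ (DImode, cond, const0_rtx);
  rtx retry_jump = alpha_emit_unlikely_jump (x, label);
  gcc_assert (retry_jump && JUMP_P (retry_jump));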
/* Subroutines of the atomic operation splitters.  Emit barriers
   as needed for the memory MODEL.  */
  emit_insn (gen_store_conditional (mode, cond, mem, scratch));
  x = gen_rtx_EQ (DImode, cond, const0_rtx);
-  emit_unlikely_jump (x, label);
+  alpha_emit_unlikely_jump (x, label);
  alpha_post_atomic_barrier (model);
}
      emit_insn (gen_rtx_SET (cond, x));
      x = gen_rtx_EQ (DImode, cond, const0_rtx);
    }
-  emit_unlikely_jump (x, label2);
+  alpha_emit_unlikely_jump (x, label2);
  emit_move_insn (cond, newval);
  emit_insn (gen_store_conditional
  if (!is_weak)
    {
      x = gen_rtx_EQ (DImode, cond, const0_rtx);
-      emit_unlikely_jump (x, label1);
+      alpha_emit_unlikely_jump (x, label1);
    }
  if (!is_mm_relaxed (mod_f))
      emit_insn (gen_rtx_SET (cond, x));
      x = gen_rtx_EQ (DImode, cond, const0_rtx);
    }
-  emit_unlikely_jump (x, label2);
+  alpha_emit_unlikely_jump (x, label2);
  emit_insn (gen_mskxl (cond, scratch, mask, addr));
  if (!is_weak)
    {
      x = gen_rtx_EQ (DImode, cond, const0_rtx);
-      emit_unlikely_jump (x, label1);
+      alpha_emit_unlikely_jump (x, label1);
    }
  if (!is_mm_relaxed (mod_f))
  emit_insn (gen_store_conditional (mode, cond, mem, scratch));
  x = gen_rtx_EQ (DImode, cond, const0_rtx);
-  emit_unlikely_jump (x, label);
+  alpha_emit_unlikely_jump (x, label);
  alpha_post_atomic_barrier (model);
}
  emit_insn (gen_store_conditional (DImode, scratch, mem, scratch));
  x = gen_rtx_EQ (DImode, scratch, const0_rtx);
-  emit_unlikely_jump (x, label);
+  alpha_emit_unlikely_jump (x, label);
  alpha_post_atomic_barrier (model);
}
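For orientation, every call site above sits at the tail of the same load-locked/store-conditional retry pattern.  A condensed, paraphrased sketch of that shape follows; it is modeled on the splitters touched here, the gen_load_locked call is assumed to use the same parameterized convention as the gen_store_conditional calls visible in the hunks, and alpha_pre_atomic_barrier is assumed to be the counterpart of the alpha_post_atomic_barrier calls shown above.

  alpha_pre_atomic_barrier (model);                  /* fence before the loop, per MODEL */

  label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
  emit_label (XEXP (label, 0));                      /* top of the retry loop */

  emit_insn (gen_load_locked (mode, scratch, mem));  /* LDx_L */
  /* ... compute the updated value into SCRATCH ... */
  emit_insn (gen_store_conditional (mode, cond, mem, scratch));  /* STx_C */

  /* STx_C leaves 0 in COND on failure, so branch back and retry in
     that (unlikely) case.  */
  x = gen_rtx_EQ (DImode, cond, const0_rtx);
  alpha_emit_unlikely_jump (x, label);

  alpha_post_atomic_barrier (model);                 /* fence after the loop, per MODEL */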