.out = tgen_qemu_ld,
};
-static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
- MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType data_type, TCGReg data_reg,
+ TCGReg addr_reg, MemOpIdx oi)
{
TCGLabelQemuLdst *ldst;
HostAddress h;
}
}
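+/* The 'z' constraint folds constant 0 to the zero register, so zero stores need no scratch. */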
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(rz, r),
+ .out = tgen_qemu_st,
+};
+
static void tcg_out_qemu_ldst_i128(TCGContext *s, TCGReg datalo, TCGReg datahi,
TCGReg addr_reg, MemOpIdx oi, bool is_ld)
{
.out = tgen_qemu_ld2,
};
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr_reg, MemOpIdx oi)
+{
+ tcg_out_qemu_ldst_i128(s, datalo, datahi, addr_reg, oi, false);
+}
+
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_O0_I3(rz, rz, r),
+ .out = tgen_qemu_st2,
+};
+
static const tcg_insn_unit *tb_ret_addr;
static void tcg_out_exit_tb(TCGContext *s, uintptr_t a0)
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- /* Hoist the loads of the most common arguments. */
- TCGArg a0 = args[0];
- TCGArg a1 = args[1];
- TCGArg a2 = args[2];
-
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, a0, a1, a2, ext);
- break;
- case INDEX_op_qemu_st2:
- tcg_out_qemu_ldst_i128(s, a0, a1, a2, args[3], false);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static void tcg_out_vec_op(TCGContext *s, TCGOpcode opc,
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(rz, r);
- case INDEX_op_qemu_st2:
- return C_O0_I3(rz, rz, r);
-
case INDEX_op_add_vec:
case INDEX_op_sub_vec:
case INDEX_op_mul_vec:
}
}
-static void tcg_out_qemu_st(TCGContext *s, TCGReg datalo, TCGReg datahi,
- TCGReg addr, MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data,
+ TCGReg addr, MemOpIdx oi)
{
MemOp opc = get_memop(oi);
TCGLabelQemuLdst *ldst;
ldst = prepare_host_addr(s, &h, addr, oi, false);
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
+ ldst->datalo_reg = data;
+ ldst->datahi_reg = -1;
+
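+ /*
+ * prepare_host_addr left the flags set from the TLB comparison:
+ * predicate the store on EQ (hit) and the slow-path call on NE (miss).
+ */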
+ h.cond = COND_EQ;
+ tcg_out_qemu_st_direct(s, opc, data, -1, h);
+
+ /* The conditional call is last, as we're going to return here. */
+ ldst->label_ptr[0] = s->code_ptr;
+ tcg_out_bl_imm(s, COND_NE, 0);
+ ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
+ } else {
+ tcg_out_qemu_st_direct(s, opc, data, -1, h);
+ }
+}
+
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(q, q),
+ .out = tgen_qemu_st,
+};
+
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr, MemOpIdx oi)
+{
+ MemOp opc = get_memop(oi);
+ TCGLabelQemuLdst *ldst;
+ HostAddress h;
+
+ ldst = prepare_host_addr(s, &h, addr, oi, false);
+ if (ldst) {
+ ldst->type = type;
ldst->datalo_reg = datalo;
ldst->datahi_reg = datahi;
}
}
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_O0_I3(Q, p, q),
+ .out = tgen_qemu_st2,
+};
+
static void tcg_out_epilogue(TCGContext *s);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, args[0], -1, args[1], args[2], TCG_TYPE_I32);
- break;
- case INDEX_op_qemu_st2:
- tcg_out_qemu_st(s, args[0], args[1], args[2], args[3], TCG_TYPE_I64);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(q, q);
- case INDEX_op_qemu_st2:
- return C_O0_I3(Q, p, q);
-
case INDEX_op_st_vec:
return C_O0_I2(w, r);
case INDEX_op_ld_vec:
switch (memop & MO_SIZE) {
case MO_8:
- /* This is handled with constraints on INDEX_op_qemu_st. */
+ /* This is handled with constraints in cset_qemu_st(). */
tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || datalo < 4);
tcg_out_modrm_sib_offset(s, OPC_MOVB_EvGv + P_REXB_R + h.seg,
datalo, h.base, h.index, 0, h.ofs);
}
}
-static void tcg_out_qemu_st(TCGContext *s, TCGReg datalo, TCGReg datahi,
- TCGReg addr, MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data,
+ TCGReg addr, MemOpIdx oi)
+{
+ TCGLabelQemuLdst *ldst;
+ HostAddress h;
+
+ ldst = prepare_host_addr(s, &h, addr, oi, false);
+ tcg_out_qemu_st_direct(s, data, -1, h, get_memop(oi));
+
+ if (ldst) {
+ ldst->type = type;
+ ldst->datalo_reg = data;
+ ldst->datahi_reg = -1;
+ ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
+ }
+}
+
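+/*
+ * MO_8 stores need a byte-addressable data register; on 32-bit hosts
+ * that is the 's' constraint, so the set depends on the memop flags.
+ */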
+static TCGConstraintSetIndex cset_qemu_st(TCGType type, unsigned flags)
+{
+ return flags == MO_8 ? C_O0_I2(s, L) : C_O0_I2(L, L);
+}
+
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint =
+ TCG_TARGET_REG_BITS == 32 ? C_Dynamic : C_O0_I2(L, L),
+ .base.dynamic_constraint =
+ TCG_TARGET_REG_BITS == 32 ? cset_qemu_st : NULL,
+ .out = tgen_qemu_st,
+};
+
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr, MemOpIdx oi)
{
TCGLabelQemuLdst *ldst;
HostAddress h;
tcg_out_qemu_st_direct(s, datalo, datahi, h, get_memop(oi));
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
ldst->datalo_reg = datalo;
ldst->datahi_reg = datahi;
ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
}
}
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_O0_I3(L, L, L),
+ .out = tgen_qemu_st2,
+};
+
static void tcg_out_exit_tb(TCGContext *s, uintptr_t a0)
{
/* Reuse the zeroing that exists for goto_ptr. */
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGArg a0, a1, a2;
-
- /* Hoist the loads of the most common arguments. */
- a0 = args[0];
- a1 = args[1];
- a2 = args[2];
-
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, a0, -1, a1, a2, type);
- break;
- case INDEX_op_qemu_st2:
- tcg_out_qemu_st(s, a0, a1, a2, args[3], type);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static int const umin_insn[4] = {
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_qemu_st:
- return (TCG_TARGET_REG_BITS == 32 && flags == MO_8
- ? C_O0_I2(s, L)
- : C_O0_I2(L, L));
-
- case INDEX_op_qemu_st2:
- return C_O0_I3(L, L, L);
-
case INDEX_op_ld_vec:
case INDEX_op_dupm_vec:
return C_O1_I1(x, r);
}
}
-static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
- MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data_reg,
+ TCGReg addr_reg, MemOpIdx oi)
{
TCGLabelQemuLdst *ldst;
HostAddress h;
tcg_out_qemu_st_indexed(s, get_memop(oi), data_reg, h);
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
ldst->datalo_reg = data_reg;
ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
}
}
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(rz, r),
+ .out = tgen_qemu_st,
+};
+
static void tcg_out_qemu_ldst_i128(TCGContext *s, TCGReg data_lo, TCGReg data_hi,
TCGReg addr_reg, MemOpIdx oi, bool is_ld)
{
.out = tgen_qemu_ld2,
};
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr_reg, MemOpIdx oi)
+{
+ tcg_out_qemu_ldst_i128(s, datalo, datahi, addr_reg, oi, false);
+}
+
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_O0_I3(r, r, r),
+ .out = tgen_qemu_st2,
+};
+
/*
* Entry-points
*/
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGArg a0 = args[0];
- TCGArg a1 = args[1];
- TCGArg a2 = args[2];
- TCGArg a3 = args[3];
-
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, a0, a1, a2, type);
- break;
- case INDEX_op_qemu_st2:
- tcg_out_qemu_ldst_i128(s, a0, a1, a2, a3, false);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static bool tcg_out_dup_vec(TCGContext *s, TCGType type, unsigned vece,
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(rz, r);
- case INDEX_op_qemu_st2:
- return C_O0_I3(r, r, r);
-
case INDEX_op_ld_vec:
case INDEX_op_dupm_vec:
case INDEX_op_dup_vec:
}
}
-static void tcg_out_qemu_st(TCGContext *s, TCGReg datalo, TCGReg datahi,
- TCGReg addr, MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data,
+ TCGReg addr, MemOpIdx oi)
{
MemOp opc = get_memop(oi);
TCGLabelQemuLdst *ldst;
ldst = prepare_host_addr(s, &h, addr, oi, false);
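+ /* R6 handles unaligned access in hardware; pre-R6 needs swl/swr pairs. */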
+ if (use_mips32r6_instructions || h.aa.align >= (opc & MO_SIZE)) {
+ tcg_out_qemu_st_direct(s, data, 0, h.base, opc);
+ } else {
+ tcg_out_qemu_st_unalign(s, data, 0, h.base, opc);
+ }
+
+ if (ldst) {
+ ldst->type = type;
+ ldst->datalo_reg = data;
+ ldst->datahi_reg = 0;
+ ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
+ }
+}
+
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(rz, r),
+ .out = tgen_qemu_st,
+};
+
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr, MemOpIdx oi)
+{
+ MemOp opc = get_memop(oi);
+ TCGLabelQemuLdst *ldst;
+ HostAddress h;
+
+ tcg_debug_assert(TCG_TARGET_REG_BITS == 32);
+ ldst = prepare_host_addr(s, &h, addr, oi, false);
+
if (use_mips32r6_instructions || h.aa.align >= (opc & MO_SIZE)) {
tcg_out_qemu_st_direct(s, datalo, datahi, h.base, opc);
} else {
}
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
ldst->datalo_reg = datalo;
ldst->datahi_reg = datahi;
ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
}
}
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ /* Ensure that the mips32 code is compiled but discarded for mips64. */
+ .base.static_constraint =
+ TCG_TARGET_REG_BITS == 32 ? C_O0_I3(rz, rz, r) : C_NotImplemented,
+ .out =
+ TCG_TARGET_REG_BITS == 32 ? tgen_qemu_st2 : NULL,
+};
+
static void tcg_out_mb(TCGContext *s, unsigned a0)
{
static const MIPSInsn sync[] = {
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGArg a0, a1, a2;
-
- a0 = args[0];
- a1 = args[1];
- a2 = args[2];
-
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, a0, 0, a1, a2, type);
- break;
- case INDEX_op_qemu_st2:
- tcg_debug_assert(TCG_TARGET_REG_BITS == 32);
- tcg_out_qemu_st(s, a0, a1, a2, args[3], type);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
- switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(rz, r);
- case INDEX_op_qemu_ld2:
- return TCG_TARGET_REG_BITS == 64 ? C_NotImplemented : C_O2_I1(r, r, r);
- case INDEX_op_qemu_st2:
- return TCG_TARGET_REG_BITS == 64 ? C_NotImplemented : C_O0_I3(rz, rz, r);
-
- default:
- return C_NotImplemented;
- }
+ return C_NotImplemented;
}
static const int tcg_target_callee_save_regs[] = {
.out = tgen_qemu_ld2,
};
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data,
+ TCGReg addr, MemOpIdx oi)
+{
+ tcg_out_qemu_st(s, data, -1, addr, oi, type);
+}
+
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(r, r),
+ .out = tgen_qemu_st,
+};
+
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr, MemOpIdx oi)
+{
+ if (TCG_TARGET_REG_BITS == 32) {
+ tcg_out_qemu_st(s, datalo, datahi, addr, oi, type);
+ } else {
+ tcg_out_qemu_ldst_i128(s, datalo, datahi, addr, oi, false);
+ }
+}
+
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint =
+ TCG_TARGET_REG_BITS == 64 ? C_O0_I3(o, m, r) : C_O0_I3(r, r, r),
+ .out = tgen_qemu_st2,
+};
+
static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
int i;
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, args[0], -1, args[1], args[2], type);
- break;
- case INDEX_op_qemu_st2:
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_out_qemu_st(s, args[0], args[1], args[2],
- args[3], TCG_TYPE_I64);
- break;
- }
- tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], false);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
int tcg_can_emit_vec_op(TCGOpcode opc, TCGType type, unsigned vece)
}
}
-static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
- MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data_reg,
+ TCGReg addr_reg, MemOpIdx oi)
{
TCGLabelQemuLdst *ldst;
TCGReg base;
tcg_out_qemu_st_direct(s, data_reg, base, get_memop(oi));
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
ldst->datalo_reg = data_reg;
ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
}
}
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(rz, r),
+ .out = tgen_qemu_st,
+};
+
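+/* Without 128-bit memory ops, qemu_st2 never reaches this backend. */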
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_NotImplemented,
+};
+
static const tcg_insn_unit *tb_ret_addr;
static void tcg_out_exit_tb(TCGContext *s, uintptr_t a0)
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGArg a0 = args[0];
- TCGArg a1 = args[1];
- TCGArg a2 = args[2];
-
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, a0, a1, a2, type);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static void tcg_out_vec_op(TCGContext *s, TCGOpcode opc,
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(rz, r);
-
case INDEX_op_st_vec:
return C_O0_I2(v, r);
case INDEX_op_dup_vec:
.out = tgen_qemu_ld,
};
-static void tcg_out_qemu_st(TCGContext* s, TCGReg data_reg, TCGReg addr_reg,
- MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data_reg,
+ TCGReg addr_reg, MemOpIdx oi)
{
TCGLabelQemuLdst *ldst;
HostAddress h;
tcg_out_qemu_st_direct(s, get_memop(oi), data_reg, h);
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
ldst->datalo_reg = data_reg;
ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
}
}
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(r, r),
+ .out = tgen_qemu_st,
+};
+
static void tcg_out_qemu_ldst_i128(TCGContext *s, TCGReg datalo, TCGReg datahi,
TCGReg addr_reg, MemOpIdx oi, bool is_ld)
{
.out = tgen_qemu_ld2,
};
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr_reg, MemOpIdx oi)
+{
+ tcg_out_qemu_ldst_i128(s, datalo, datahi, addr_reg, oi, false);
+}
+
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_O0_I3(o, m, r),
+ .out = tgen_qemu_st2,
+};
+
static void tcg_out_exit_tb(TCGContext *s, uintptr_t a0)
{
/* Reuse the zeroing that exists for goto_ptr. */
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, args[0], args[1], args[2], type);
- break;
- case INDEX_op_qemu_st2:
- tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], false);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static bool tcg_out_dup_vec(TCGContext *s, TCGType type, unsigned vece,
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(r, r);
- case INDEX_op_qemu_st2:
- return C_O0_I3(o, m, r);
-
case INDEX_op_st_vec:
return C_O0_I2(v, r);
case INDEX_op_ld_vec:
.base.static_constraint = C_NotImplemented,
};
-static void tcg_out_qemu_st(TCGContext *s, TCGReg data, TCGReg addr,
- MemOpIdx oi, TCGType data_type)
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data,
+ TCGReg addr, MemOpIdx oi)
{
static const int st_opc[(MO_SIZE | MO_BSWAP) + 1] = {
[MO_UB] = STB,
st_opc[get_memop(oi) & (MO_BSWAP | MO_SIZE)]);
if (ldst) {
- ldst->type = data_type;
+ ldst->type = type;
ldst->datalo_reg = data;
ldst->raddr = tcg_splitwx_to_rx(s->code_ptr);
}
}
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(rz, r),
+ .out = tgen_qemu_st,
+};
+
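+/* There are no 128-bit memory ops here either, so qemu_st2 is not implemented. */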
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint = C_NotImplemented,
+};
+
static void tcg_out_exit_tb(TCGContext *s, uintptr_t a0)
{
if (check_fit_ptr(a0, 13)) {
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGArg a0, a1, a2;
-
- /* Hoist the loads of the most common arguments. */
- a0 = args[0];
- a1 = args[1];
- a2 = args[2];
-
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_qemu_st(s, a0, a1, a2, type);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
- switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(rz, r);
-
- default:
- return C_NotImplemented;
- }
+ return C_NotImplemented;
}
static void tcg_target_init(TCGContext *s)
OUTOP(INDEX_op_orc, TCGOutOpBinary, outop_orc),
OUTOP(INDEX_op_qemu_ld, TCGOutOpQemuLdSt, outop_qemu_ld),
OUTOP(INDEX_op_qemu_ld2, TCGOutOpQemuLdSt2, outop_qemu_ld2),
+ OUTOP(INDEX_op_qemu_st, TCGOutOpQemuLdSt, outop_qemu_st),
+ OUTOP(INDEX_op_qemu_st2, TCGOutOpQemuLdSt2, outop_qemu_st2),
OUTOP(INDEX_op_rems, TCGOutOpBinary, outop_rems),
OUTOP(INDEX_op_remu, TCGOutOpBinary, outop_remu),
OUTOP(INDEX_op_rotl, TCGOutOpBinary, outop_rotl),
break;
case INDEX_op_qemu_ld:
+ case INDEX_op_qemu_st:
{
- const TCGOutOpQemuLdSt *out = &outop_qemu_ld;
+ const TCGOutOpQemuLdSt *out =
+ container_of(all_outop[op->opc], TCGOutOpQemuLdSt, base);
+
out->out(s, type, new_args[0], new_args[1], new_args[2]);
}
break;
case INDEX_op_qemu_ld2:
+ case INDEX_op_qemu_st2:
{
- const TCGOutOpQemuLdSt2 *out = &outop_qemu_ld2;
+ const TCGOutOpQemuLdSt2 *out =
+ container_of(all_outop[op->opc], TCGOutOpQemuLdSt2, base);
+
out->out(s, type, new_args[0], new_args[1],
new_args[2], new_args[3]);
}
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
- switch (op) {
- case INDEX_op_qemu_st:
- return C_O0_I2(r, r);
- case INDEX_op_qemu_st2:
- return TCG_TARGET_REG_BITS == 64 ? C_NotImplemented : C_O0_I3(r, r, r);
-
- default:
- return C_NotImplemented;
- }
+ return C_NotImplemented;
}
static const int tcg_target_reg_alloc_order[] = {
TCG_TARGET_REG_BITS == 64 ? NULL : tgen_qemu_ld2,
};
+static void tgen_qemu_st(TCGContext *s, TCGType type, TCGReg data,
+ TCGReg addr, MemOpIdx oi)
+{
+ tcg_out_op_rrm(s, INDEX_op_qemu_st, data, addr, oi);
+}
+
+static const TCGOutOpQemuLdSt outop_qemu_st = {
+ .base.static_constraint = C_O0_I2(r, r),
+ .out = tgen_qemu_st,
+};
+
+static void tgen_qemu_st2(TCGContext *s, TCGType type, TCGReg datalo,
+ TCGReg datahi, TCGReg addr, MemOpIdx oi)
+{
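+ /* The rrrr encoding has no immediate operand; pass oi in TCG_REG_TMP. */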
+ tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, oi);
+ tcg_out_op_rrrr(s, INDEX_op_qemu_st2, datalo, datahi, addr, TCG_REG_TMP);
+}
+
+static const TCGOutOpQemuLdSt2 outop_qemu_st2 = {
+ .base.static_constraint =
+ TCG_TARGET_REG_BITS == 64 ? C_NotImplemented : C_O0_I3(r, r, r),
+ .out =
+ TCG_TARGET_REG_BITS == 64 ? NULL : tgen_qemu_st2,
+};
+
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- switch (opc) {
- case INDEX_op_qemu_st:
- tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
- break;
- case INDEX_op_qemu_st2:
- tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
- tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
- break;
-
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
- case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- default:
- g_assert_not_reached();
- }
+ g_assert_not_reached();
}
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,