}
}
+   /* CmpNE64(ccall, 64-bit constant) (--smc-check=all optimisation).
+      Saves a "movq %rax, %tmp" compared to the default route. */
+   if (e->tag == Iex_Binop
+       && e->Iex.Binop.op == Iop_CmpNE64
+       && e->Iex.Binop.arg1->tag == Iex_CCall
+       && e->Iex.Binop.arg2->tag == Iex_Const) {
+      IRExpr* cal = e->Iex.Binop.arg1;
+      IRExpr* con = e->Iex.Binop.arg2;
+      HReg    tmp = newVRegI(env);
+      /* clone & partial-eval of generic Iex_CCall and Iex_Const cases */
+      vassert(cal->Iex.CCall.retty == Ity_I64); /* else ill-typed IR */
+      vassert(con->Iex.Const.con->tag == Ico_U64);
+      /* Marshal args, do the call. */
+      doHelperCall( env, False, NULL, cal->Iex.CCall.cee, cal->Iex.CCall.args );
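+      /* The call's 64-bit result is returned in %rax; load the
+         constant into tmp and compare the two, so NZ means
+         "not equal". */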
+      addInstr(env, AMD64Instr_Imm64(con->Iex.Const.con->Ico.U64, tmp));
+      addInstr(env, AMD64Instr_Alu64R(Aalu_CMP,
+                                      AMD64RMI_Reg(hregAMD64_RAX()), tmp));
+      return Acc_NZ;
+   }
+
/* Cmp*64*(x,y) */
if (e->tag == Iex_Binop
&& (e->Iex.Binop.op == Iop_CmpEQ64
}
}
+   /* CmpNE32(ccall, 32-bit constant) (--smc-check=all optimisation).
+      Saves a "movl %eax, %tmp" compared to the default route. */
+   if (e->tag == Iex_Binop
+       && e->Iex.Binop.op == Iop_CmpNE32
+       && e->Iex.Binop.arg1->tag == Iex_CCall
+       && e->Iex.Binop.arg2->tag == Iex_Const) {
+      IRExpr* cal = e->Iex.Binop.arg1;
+      IRExpr* con = e->Iex.Binop.arg2;
+      /* clone & partial-eval of generic Iex_CCall and Iex_Const cases */
+      vassert(cal->Iex.CCall.retty == Ity_I32); /* else ill-typed IR */
+      vassert(con->Iex.Const.con->tag == Ico_U32);
+      /* Marshal args, do the call. */
+      doHelperCall( env, False, NULL, cal->Iex.CCall.cee, cal->Iex.CCall.args );
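+      /* The call's 32-bit result is returned in %eax; compare it
+         directly against the immediate constant, so NZ means
+         "not equal". */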
+      addInstr(env, X86Instr_Alu32R(Xalu_CMP,
+                                    X86RMI_Imm(con->Iex.Const.con->Ico.U32),
+                                    hregX86_EAX()));
+      return Xcc_NZ;
+   }
+
/* Cmp*32*(x,y) */
if (e->tag == Iex_Binop
&& (e->Iex.Binop.op == Iop_CmpEQ32