binop(Iop_Shl64,cc_dep2,mkU8(32))));
}
-//.. if (isU32(cc_op, AMD64G_CC_OP_SUBL) && isU32(cond, X86CondNZ)) {
-//.. /* long sub/cmp, then NZ --> test dst!=src */
-//.. return unop(Iop_1Uto32,
-//.. binop(Iop_CmpNE32, cc_dep1, cc_dep2));
-//.. }
-
   if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondL)) {
      /* long sub/cmp, then L (signed less than)
         --> test dst <s src */
      return unop(Iop_1Uto64,
                  binop(Iop_CmpLT64S,
                        binop(Iop_Shl64,cc_dep1,mkU8(32)),
                        binop(Iop_Shl64,cc_dep2,mkU8(32))));
   }
-//.. if (isU32(cc_op, AMD64G_CC_OP_SUBL) && isU32(cond, X86CondB)) {
-//.. /* long sub/cmp, then B (unsigned less than)
-//.. --> test dst <u src */
-//.. return unop(Iop_1Uto32,
-//.. binop(Iop_CmpLT32U, cc_dep1, cc_dep2));
-//.. }
+ if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNBE)) {
+ /* long sub/cmp, then NBE (unsigned greater than)
+ --> test src <u dst */
+ /* Note, args are opposite way round from the usual */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpLT64U,
+ binop(Iop_Shl64,cc_dep2,mkU8(32)),
+ binop(Iop_Shl64,cc_dep1,mkU8(32))));
+ }
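+
+ /* The Shl64-by-32 idiom above: for the SUBL thunk ops only the low
+    32 bits of cc_dep1/cc_dep2 are defined, and the upper halves may
+    hold junk.  Shifting both operands left by 32 pushes the junk
+    out, so a full-width 64-bit compare (CmpEQ64/CmpLT64S/CmpLT64U)
+    yields exactly the 32-bit comparison result. */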
   /*---------------- SUBW ----------------*/

   /*---------------- SUBB ----------------*/

   if (isU64(cc_op, AMD64G_CC_OP_SUBB) && isU64(cond, AMD64CondZ)) {
      /* byte sub/cmp, then Z --> test dst==src */
      return unop(Iop_1Uto64,
                  binop(Iop_CmpEQ8,
                        unop(Iop_64to8,cc_dep1),
                        unop(Iop_64to8,cc_dep2)));
   }
+ if (isU64(cc_op, AMD64G_CC_OP_SUBB) && isU64(cond, AMD64CondNZ)) {
+ /* byte sub/cmp, then NZ --> test dst!=src */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpNE8,
+ unop(Iop_64to8,cc_dep1),
+ unop(Iop_64to8,cc_dep2)));
+ }
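+
+ /* For the byte-sized (SUBB) ops the opposite narrowing works: only
+    the low 8 bits are defined, so Iop_64to8 truncates both operands
+    and an 8-bit compare (CmpEQ8/CmpNE8) decides the condition
+    directly, with no shifting needed. */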
+
   if (isU64(cc_op, AMD64G_CC_OP_SUBB) && isU64(cond, AMD64CondS)
       && isU64(cc_dep2, 0)) {
      /* byte sub/cmp of zero, then S --> test (dst-0 <s 0)
                                      --> test dst <s 0 */
      return unop(Iop_1Uto64,
                  binop(Iop_CmpLT64S,
                        binop(Iop_Shl64,cc_dep1,mkU8(56)),
                        mkU64(0)));
   }
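
   /* Shifting the byte left by 56 puts its sign bit at bit 63, so the
      64-bit signed compare against zero reads off "dst <s 0" at byte
      width, following the same widening scheme as the SUBL cases. */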
+ /*---------------- DECW ----------------*/
+
+ if (isU32(cc_op, X86G_CC_OP_DECW) && isU32(cond, X86CondZ)) {
+ /* dec W, then Z --> test dst == 0 */
+ return unop(Iop_1Uto32,
+ binop(Iop_CmpEQ32,
+ binop(Iop_Shl32,cc_dep1,mkU8(16)),
+ mkU32(0)));
+ }
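+
+ /* Same narrowing idiom at word width: dst here is the 16-bit value
+    after the decrement, so shifting it into the top half of the
+    32-bit word and comparing the whole register with 0 tests
+    dst == 0 without needing a 16-bit compare op. */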
+
/*---------------- INCW ----------------*/
if (isU32(cc_op, X86G_CC_OP_INCW) && isU32(cond, X86CondZ)) {