return unop(Iop_1Uto64,
binop(Iop_CmpEQ64,cc_dep1,cc_dep2));
}
-
if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondNZ)) {
/* long long sub/cmp, then NZ --> test dst!=src */
return unop(Iop_1Uto64,
return unop(Iop_1Uto64,
binop(Iop_CmpLT64U, cc_dep1, cc_dep2));
}
-
if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondNB)) {
/* long long sub/cmp, then NB (unsigned greater than or equal)
--> test src <=u dst */
binop(Iop_Shl64,cc_dep1,mkU8(32)),
binop(Iop_Shl64,cc_dep2,mkU8(32))));
}
-
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNZ)) {
/* long sub/cmp, then NZ --> test dst!=src */
return unop(Iop_1Uto64,
binop(Iop_Shl64,cc_dep1,mkU8(32)),
binop(Iop_Shl64,cc_dep2,mkU8(32))));
}
-
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNBE)) {
/* long sub/cmp, then NBE (unsigned greater than)
--> test src <u dst */
unop(Iop_64to8,cc_dep1),
unop(Iop_64to8,cc_dep2)));
}
-
if (isU64(cc_op, AMD64G_CC_OP_SUBB) && isU64(cond, AMD64CondNZ)) {
/* byte sub/cmp, then NZ --> test dst!=src */
return unop(Iop_1Uto64,
mkU64(1));
}
-// if (isU64(cc_op, AMD64G_CC_OP_SUBB) && isU64(cond, AMD64CondNZ)) {
-// /* byte sub/cmp, then NZ --> test dst!=src */
-// return unop(Iop_32Uto64,
-// unop(Iop_1Uto32,
-// binop(Iop_CmpNE8,
-// unop(Iop_32to8,unop(Iop_64to32,cc_dep1)),
-// unop(Iop_32to8,unop(Iop_64to32,cc_dep2)))));
-// }
-
-//.. if (isU32(cc_op, AMD64G_CC_OP_SUBB) && isU32(cond, X86CondNBE)) {
-//.. /* long sub/cmp, then NBE (unsigned greater than)
-//.. --> test src <u dst */
-//.. /* Note, args are opposite way round from the usual */
-//.. return unop(Iop_1Uto32,
-//.. binop(Iop_CmpLT32U,
-//.. binop(Iop_And32,cc_dep2,mkU32(0xFF)),
-//.. binop(Iop_And32,cc_dep1,mkU32(0xFF))));
-//.. }
-
/*---------------- LOGICQ ----------------*/
if (isU64(cc_op, AMD64G_CC_OP_LOGICQ) && isU64(cond, AMD64CondZ)) {
mkU64(0)));
}
-//.. if (isU32(cc_op, AMD64G_CC_OP_LOGICL) && isU32(cond, X86CondS)) {
-//.. /* long and/or/xor, then S --> test dst <s 0 */
-//.. return unop(Iop_1Uto32,binop(Iop_CmpLT32S, cc_dep1, mkU32(0)));
-//.. }
-
if (isU64(cc_op, AMD64G_CC_OP_LOGICL) && isU64(cond, AMD64CondLE)) {
/* long and/or/xor, then LE
This is pretty subtle. LOGIC sets SF and ZF according to the
mkU64(0)));
}
-//.. if (isU32(cc_op, AMD64G_CC_OP_LOGICL) && isU32(cond, X86CondBE)) {
-//.. /* long and/or/xor, then BE
-//.. LOGIC sets ZF according to the result and makes CF be zero.
-//.. BE computes (CF | ZF), but CF is zero, so this reduces ZF
-//.. -- which will be 1 iff the result is zero. Hence ...
-//.. */
-//.. return unop(Iop_1Uto32,binop(Iop_CmpEQ32, cc_dep1, mkU32(0)));
-//.. }
-//..
-//.. /*---------------- LOGICW ----------------*/
-//..
-//.. if (isU32(cc_op, AMD64G_CC_OP_LOGICW) && isU32(cond, X86CondZ)) {
-//.. /* byte and/or/xor, then Z --> test dst==0 */
-//.. return unop(Iop_1Uto32,
-//.. binop(Iop_CmpEQ32, binop(Iop_And32,cc_dep1,mkU32(0xFFFF)),
-//.. mkU32(0)));
-//.. }
-
/*---------------- LOGICB ----------------*/
if (isU64(cc_op, AMD64G_CC_OP_LOGICB) && isU64(cond, AMD64CondZ)) {
mkU64(0)));
}
+ /*---------------- INCW ----------------*/
+
+ if (isU64(cc_op, AMD64G_CC_OP_INCW) && isU64(cond, AMD64CondZ)) {
+ /* 16-bit inc, then Z --> test dst == 0 */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpEQ64,
+ binop(Iop_Shl64,cc_dep1,mkU8(48)),
+ mkU64(0)));
+ }
+
/*---------------- DECL ----------------*/
if (isU64(cc_op, AMD64G_CC_OP_DECL) && isU64(cond, AMD64CondZ)) {
mkU64(0)));
}
-//.. /*---------------- DECL ----------------*/
-//..
-//.. if (isU32(cc_op, AMD64G_CC_OP_DECL) && isU32(cond, X86CondZ)) {
-//.. /* dec L, then Z --> test dst == 0 */
-//.. return unop(Iop_1Uto32,binop(Iop_CmpEQ32, cc_dep1, mkU32(0)));
-//.. }
-//..
-//.. if (isU32(cc_op, AMD64G_CC_OP_DECL) && isU32(cond, X86CondS)) {
-//.. /* dec L, then S --> compare DST <s 0 */
-//.. return unop(Iop_1Uto32,binop(Iop_CmpLT32S, cc_dep1, mkU32(0)));
-//.. }
-//..
-//.. /*---------------- SHRL ----------------*/
-//..
-//.. if (isU32(cc_op, AMD64G_CC_OP_SHRL) && isU32(cond, X86CondZ)) {
-//.. /* SHRL, then Z --> test dep1 == 0 */
-//.. return unop(Iop_1Uto32,binop(Iop_CmpEQ32, cc_dep1, mkU32(0)));
-//.. }
-
/*---------------- COPY ----------------*/
/* This can happen, as a result of amd64 FP compares: "comisd ... ;
jbe" for example. */
);
}
+ if (isU64(cc_op, AMD64G_CC_OP_COPY)
+ && (isU64(cond, AMD64CondZ) || isU64(cond, AMD64CondNZ))) {
+ /* COPY, then Z --> extract Z from dep1, and test (Z == 1). */
+ /* COPY, then NZ --> extract Z from dep1, and test (Z == 0). */
+ UInt nnn = isU64(cond, AMD64CondZ) ? 1 : 0;
+ return
+ unop(
+ Iop_1Uto64,
+ binop(
+ Iop_CmpEQ64,
+ binop(
+ Iop_And64,
+ binop(Iop_Shr64, cc_dep1, mkU8(AMD64G_CC_SHIFT_Z)),
+ mkU64(1)
+ ),
+ mkU64(nnn)
+ )
+ );
+ }
+
+ if (isU64(cc_op, AMD64G_CC_OP_COPY) && isU64(cond, AMD64CondP)) {
+ /* COPY, then P --> extract P from dep1, and test (P == 1). */
+ return
+ unop(
+ Iop_1Uto64,
+ binop(
+ Iop_CmpNE64,
+ binop(
+ Iop_And64,
+ binop(Iop_Shr64, cc_dep1, mkU8(AMD64G_CC_SHIFT_P)),
+ mkU64(1)
+ ),
+ mkU64(0)
+ )
+ );
+ }
+
return NULL;
}
/* If the thunk is dec or inc, the cflag is supplied as CC_NDEP. */
return cc_ndep;
}
-//.. if (isU64(cc_op, AMD64G_CC_OP_COPY)) {
-//.. /* cflag after COPY is stored in DEP1. */
-//.. return
-//.. binop(
-//.. Iop_And64,
-//.. binop(Iop_Shr64, cc_dep1, mkU8(AMD64G_CC_SHIFT_C)),
-//.. mkU64(1)
-//.. );
-//.. }
-//.. # if 0
-//.. if (cc_op->tag == Iex_Const) {
-//.. vex_printf("CFLAG "); ppIRExpr(cc_op); vex_printf("\n");
-//.. }
-//.. # endif
+
+# if 0
+ if (cc_op->tag == Iex_Const) {
+ vex_printf("CFLAG "); ppIRExpr(cc_op); vex_printf("\n");
+ }
+# endif
return NULL;
}
-//.. /* --------- specialising "x86g_calculate_rflags_all" --------- */
-//..
-//.. if (vex_streq(function_name, "x86g_calculate_rflags_all")) {
-//.. /* specialise calls to above "calculate_rflags_all" function */
-//.. IRExpr *cc_op, *cc_dep1, *cc_dep2, *cc_ndep;
-//.. vassert(arity == 4);
-//.. cc_op = args[0];
-//.. cc_dep1 = args[1];
-//.. cc_dep2 = args[2];
-//.. cc_ndep = args[3];
-//..
-//.. if (isU32(cc_op, AMD64G_CC_OP_COPY)) {
-//.. /* eflags after COPY are stored in DEP1. */
-//.. return
-//.. binop(
-//.. Iop_And32,
-//.. cc_dep1,
-//.. mkU32(AMD64G_CC_MASK_O | AMD64G_CC_MASK_S | AMD64G_CC_MASK_Z
-//.. | AMD64G_CC_MASK_A | AMD64G_CC_MASK_C | AMD64G_CC_MASK_P)
-//.. );
-//.. }
-//.. return NULL;
-//.. }
-
# undef unop
# undef binop
# undef mkU64
return unop(Iop_1Uto32,
binop(Iop_CmpEQ32, cc_dep1, cc_dep2));
}
-
if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondNZ)) {
/* long sub/cmp, then NZ --> test dst!=src */
return unop(Iop_1Uto32,
return unop(Iop_1Uto32,
binop(Iop_CmpLT32S, cc_dep1, cc_dep2));
}
+ if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondNL)) {
+ /* long sub/cmp, then NL (signed greater than or equal)
+ --> test !(dst <s src) */
+ return binop(Iop_Xor32,
+ unop(Iop_1Uto32,
+ binop(Iop_CmpLT32S, cc_dep1, cc_dep2)),
+ mkU32(1));
+ }
if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondLE)) {
/* long sub/cmp, then LE (signed less than or equal)
return unop(Iop_1Uto32,
binop(Iop_CmpLE32S, cc_dep1, cc_dep2));
}
-
if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondNLE)) {
- /* long sub/cmp, then LE (signed not less than or equal)
+ /* long sub/cmp, then NLE (signed not less than or equal)
--> test dst >s src
--> test !(dst <=s src) */
return binop(Iop_Xor32,
return unop(Iop_1Uto32,
binop(Iop_CmpLE32U, cc_dep1, cc_dep2));
}
+ if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondNBE)) {
+ /* long sub/cmp, then NBE (unsigned greater than)
+ --> test !(dst <=u src) */
+ return binop(Iop_Xor32,
+ unop(Iop_1Uto32,
+ binop(Iop_CmpLE32U, cc_dep1, cc_dep2)),
+ mkU32(1));
+ }
if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondB)) {
/* long sub/cmp, then B (unsigned less than)
return unop(Iop_1Uto32,
binop(Iop_CmpLT32U, cc_dep1, cc_dep2));
}
+ if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondNB)) {
+ /* long sub/cmp, then NB (unsigned greater than or equal)
+ --> test !(dst <u src) */
+ return binop(Iop_Xor32,
+ unop(Iop_1Uto32,
+ binop(Iop_CmpLT32U, cc_dep1, cc_dep2)),
+ mkU32(1));
+ }
if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondS)) {
- /* long sub/cmp, then S --> test (dst-src <s 0) */
+ /* long sub/cmp, then S (negative) --> test (dst-src <s 0) */
return unop(Iop_1Uto32,
binop(Iop_CmpLT32S,
binop(Iop_Sub32, cc_dep1, cc_dep2),
mkU32(0)));
}
+ if (isU32(cc_op, X86G_CC_OP_SUBL) && isU32(cond, X86CondNS)) {
+ /* long sub/cmp, then NS (not negative) --> test !(dst-src <s 0) */
+ return binop(Iop_Xor32,
+ unop(Iop_1Uto32,
+ binop(Iop_CmpLT32S,
+ binop(Iop_Sub32, cc_dep1, cc_dep2),
+ mkU32(0))),
+ mkU32(1));
+ }
/*---------------- SUBW ----------------*/
unop(Iop_32to8,cc_dep1),
unop(Iop_32to8,cc_dep2)));
}
-
if (isU32(cc_op, X86G_CC_OP_SUBB) && isU32(cond, X86CondNZ)) {
/* byte sub/cmp, then NZ --> test dst!=src */
return unop(Iop_1Uto32,
/* long and/or/xor, then Z --> test dst==0 */
return unop(Iop_1Uto32,binop(Iop_CmpEQ32, cc_dep1, mkU32(0)));
}
-
if (isU32(cc_op, X86G_CC_OP_LOGICL) && isU32(cond, X86CondNZ)) {
/* long and/or/xor, then NZ --> test dst!=0 */
return unop(Iop_1Uto32,binop(Iop_CmpNE32, cc_dep1, mkU32(0)));
binop(Iop_Shr32,cc_dep1,mkU8(15)),
mkU32(1));
}
- //Probably correct, but no test case for it yet found
- //if (isU32(cc_op, X86G_CC_OP_LOGICW) && isU32(cond, X86CondNS)) {
- // /* see comment below for (LOGICB, CondNS) */
- // /* word and/or/xor, then S --> (UInt) ~ result[15] */
- // vassert(0+0);
- // return binop(Iop_Xor32,
- // binop(Iop_And32,
- // binop(Iop_Shr32,cc_dep1,mkU8(15)),
- // mkU32(1)),
- // mkU32(1));
- //}
/*---------------- LOGICB ----------------*/
binop(Iop_CmpEQ32, binop(Iop_And32,cc_dep1,mkU32(255)),
mkU32(0)));
}
-
if (isU32(cc_op, X86G_CC_OP_LOGICB) && isU32(cond, X86CondNZ)) {
/* byte and/or/xor, then Z --> test dst!=0 */
/* b9ac9: 84 c0 test %al,%al
if (isU32(cc_op, X86G_CC_OP_COPY) &&
(isU32(cond, X86CondBE) || isU32(cond, X86CondNBE))) {
- /* COPY, then BE --> extract C and Z from dep1, and test (C
- or Z == 1). */
- /* COPY, then NBE --> extract C and Z from dep1, and test (C
- or Z == 0). */
+ /* COPY, then BE --> extract C and Z from dep1, and test
+ (C or Z) == 1. */
+ /* COPY, then NBE --> extract C and Z from dep1, and test
+ (C or Z) == 0. */
UInt nnn = isU32(cond, X86CondBE) ? 1 : 0;
return
unop(
);
}
- if (isU32(cc_op, X86G_CC_OP_COPY) &&
- (isU32(cond, X86CondB) || isU32(cond, X86CondNB))) {
+ if (isU32(cc_op, X86G_CC_OP_COPY)
+ && (isU32(cond, X86CondB) || isU32(cond, X86CondNB))) {
/* COPY, then B --> extract C from dep1, and test (C == 1). */
/* COPY, then NB --> extract C from dep1, and test (C == 0). */
UInt nnn = isU32(cond, X86CondB) ? 1 : 0;
);
}
- if (isU32(cc_op, X86G_CC_OP_COPY) && isU32(cond, X86CondZ)) {
+ if (isU32(cc_op, X86G_CC_OP_COPY)
+ && (isU32(cond, X86CondZ) || isU32(cond, X86CondNZ))) {
/* COPY, then Z --> extract Z from dep1, and test (Z == 1). */
+ /* COPY, then NZ --> extract Z from dep1, and test (Z == 0). */
+ UInt nnn = isU32(cond, X86CondZ) ? 1 : 0;
return
unop(
Iop_1Uto32,
binop(
- Iop_CmpNE32,
+ Iop_CmpEQ32,
binop(
Iop_And32,
binop(Iop_Shr32, cc_dep1, mkU8(X86G_CC_SHIFT_Z)),
mkU32(1)
),
- mkU32(0)
+ mkU32(nnn)
)
);
}
- if (isU32(cc_op, X86G_CC_OP_COPY) && isU32(cond, X86CondP)) {
+ if (isU32(cc_op, X86G_CC_OP_COPY)
+ && (isU32(cond, X86CondP) || isU32(cond, X86CondNP))) {
/* COPY, then P --> extract P from dep1, and test (P == 1). */
+ /* COPY, then NP --> extract P from dep1, and test (P == 0). */
+ UInt nnn = isU32(cond, X86CondP) ? 1 : 0;
return
unop(
Iop_1Uto32,
binop(
- Iop_CmpNE32,
+ Iop_CmpEQ32,
binop(
Iop_And32,
binop(Iop_Shr32, cc_dep1, mkU8(X86G_CC_SHIFT_P)),
mkU32(1)
),
- mkU32(0)
+ mkU32(nnn)
)
);
}
mkU32(1)
);
}
+ if (isU32(cc_op, X86G_CC_OP_ADDL)) {
+ /* C after add denotes sum <u either arg */
+ return unop(Iop_1Uto32,
+ binop(Iop_CmpLT32U,
+ binop(Iop_Add32, cc_dep1, cc_dep2),
+ cc_dep1));
+ }
# if 0
if (cc_op->tag == Iex_Const) {
vex_printf("CFLAG "); ppIRExpr(cc_op); vex_printf("\n");