/*---------------- ADDQ ----------------*/
+ /* 4, */
if (isU64(cc_op, AMD64G_CC_OP_ADDQ) && isU64(cond, AMD64CondZ)) {
/* long long add, then Z --> test (dst+src == 0) */
return unop(Iop_1Uto64,
mkU64(0)));
}
+ /* 8, */
+ /* NOTE(review): "8" is presumably the AMD64Condcode enum value of
+ AMD64CondS -- confirm against the condition-code enum definitions. */
+ if (isU64(cc_op, AMD64G_CC_OP_ADDQ) && isU64(cond, AMD64CondS)) {
+ /* long long add, then S (negative)
+ --> (dst+src)[63]
+ --> ((dst + src) >>u 63) & 1
+ */
+ /* The final "& 1" is redundant after an unsigned shift right by 63
+ of a 64-bit value (only bit 0 can survive), but is kept for
+ uniformity with the narrower-width rules below. */
+ return binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_Add64, cc_dep1, cc_dep2),
+ mkU8(63)),
+ mkU64(1));
+ }
+
/*---------------- ADDL ----------------*/
+ /* 0, */
if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondO)) {
/* This is very commonly generated by Javascript JITs, for
the idiom "do a 32-bit add and jump to out-of-line code if
}
+ /* 8, 9 */
+ /* NOTE(review): "8, 9" presumably label the enum values of AMD64CondS
+ and AMD64CondNS, handled by this rule and the one following --
+ confirm against the condition-code enum definitions. */
+ if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondS)) {
+ /* long add, then S (negative)
+ --> (dst+src)[31]
+ --> ((dst +64 src) >>u 31) & 1
+ Pointless to narrow the args to 32 bit before the add. */
+ /* The 64-bit add produces the correct low 32 bits, so bit 31 of the
+ 64-bit sum equals the sign bit of the 32-bit result; the "& 1"
+ discards bits 32..63 that survive the shift. */
+ return binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_Add64, cc_dep1, cc_dep2),
+ mkU8(31)),
+ mkU64(1));
+ }
+ if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondNS)) {
+ /* long add, then NS (not negative)
+ --> (dst+src)[31] ^ 1
+ --> (((dst +64 src) >>u 31) & 1) ^ 1
+ Pointless to narrow the args to 32 bit before the add. */
+ /* Same extraction of the 32-bit sign bit as the CondS rule above,
+ with a final XOR 1 to invert it (NS is the negation of S). */
+ return binop(Iop_Xor64,
+ binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_Add64, cc_dep1, cc_dep2),
+ mkU8(31)),
+ mkU64(1)),
+ mkU64(1));
+ }
+
/*---------------- SUBQ ----------------*/
/* 0, */