mkU64(0)));
}
+ /*---------------- ADDL ----------------*/
+
+ if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondO)) {
+ /* This is very commonly generated by JavaScript JITs, for
+ the idiom "do a 32-bit add and jump to out-of-line code if
+ an overflow occurs". */
+ /* long add, then O (overflow)
+ --> ((dep1 ^ dep2 ^ -1) & (dep1 ^ (dep1 + dep2)))[31]
+ --> (((dep1 ^ dep2 ^ -1) & (dep1 ^ (dep1 +64 dep2))) >>u 31) & 1
+ --> (((not(dep1 ^ dep2)) & (dep1 ^ (dep1 +64 dep2))) >>u 31) & 1
+ */
+ vassert(isIRAtom(cc_dep1));
+ vassert(isIRAtom(cc_dep2));
+ return
+ binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_And64,
+ unop(Iop_Not64,
+ binop(Iop_Xor64, cc_dep1, cc_dep2)),
+ binop(Iop_Xor64,
+ cc_dep1,
+ binop(Iop_Add64, cc_dep1, cc_dep2))),
+ mkU8(31)),
+ mkU64(1));
+ }
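+ /* Illustrative check of the identity (example values only):
+    with dep1 = 0x7FFFFFFF and dep2 = 1 the 32-bit add overflows,
+    and indeed not(dep1 ^ dep2) = ..80000001 and
+    dep1 ^ (dep1 + dep2) = 0x7FFFFFFF ^ 0x80000000 = 0xFFFFFFFF
+    both have bit 31 set, so the expression yields 1.  With
+    dep1 = dep2 = 1, dep1 ^ (dep1 + dep2) = 3 has bit 31 clear,
+    yielding 0. */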
+
/*---------------- SUBQ ----------------*/
+ /* 0, 1 */
+ if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondO)) {
+ /* long long sub/cmp, then O (overflow)
+ --> ((dep1 ^ dep2) & (dep1 ^ (dep1 - dep2)))[63]
+ --> ((dep1 ^ dep2) & (dep1 ^ (dep1 - dep2))) >>u 63
+ */
+ vassert(isIRAtom(cc_dep1));
+ vassert(isIRAtom(cc_dep2));
+ return binop(Iop_Shr64,
+ binop(Iop_And64,
+ binop(Iop_Xor64, cc_dep1, cc_dep2),
+ binop(Iop_Xor64,
+ cc_dep1,
+ binop(Iop_Sub64, cc_dep1, cc_dep2))),
+ mkU8(63));
+ }
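+ /* Illustrative check (example values only): dep1 =
+    0x8000000000000000 (the most negative 64-bit value), dep2 = 1.
+    The subtract overflows, and both (dep1 ^ dep2) and
+    (dep1 ^ (dep1 - dep2)) have bit 63 set, so the shift by 63
+    yields 1.  When dep1 and dep2 have the same sign, (dep1 ^ dep2)
+    has bit 63 clear and no overflow is reported, as required. */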
+ if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondNO)) {
+ /* No action. Never yet found a test case. */
+ }
+
/* 2, 3 */
if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondB)) {
/* long long sub/cmp, then B (unsigned less than)
mkU64(1));
}
- /* 12, */
+ /* 8, 9 */
+ if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondS)) {
+ /* long long sub/cmp, then S (negative)
+ --> (dst-src)[63]
+ --> (dst-src) >>u 63 */
+ return binop(Iop_Shr64,
+ binop(Iop_Sub64, cc_dep1, cc_dep2),
+ mkU8(63));
+ }
+ if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondNS)) {
+ /* long long sub/cmp, then NS (not negative)
+ --> (dst-src)[63] ^ 1
+ --> ((dst-src) >>u 63) ^ 1 */
+ return binop(Iop_Xor64,
+ binop(Iop_Shr64,
+ binop(Iop_Sub64, cc_dep1, cc_dep2),
+ mkU8(63)),
+ mkU64(1));
+ }
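+ /* Note that dep1 - dep2 is a full 64-bit value in these two
+    cases, so Shr64 by 63 leaves exactly the sign bit in bit 0
+    and no trailing And64 with 1 is needed, unlike the 32-bit
+    SUBL cases below. */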
+
+ /* 12, 13 */
if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondL)) {
/* long long sub/cmp, then L (signed less than)
--> test dst <s src */
return unop(Iop_1Uto64,
binop(Iop_CmpLT64S, cc_dep1, cc_dep2));
}
+ if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondNL)) {
+ /* long long sub/cmp, then NL (signed greater than or equal)
+ --> test dst >=s src
+ --> test src <=s dst */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpLE64S, cc_dep2, cc_dep1));
+ }
- /* , 15 */
+ /* 14, 15 */
+ if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondLE)) {
+ /* long long sub/cmp, then LE (signed less than or equal)
+ --> test dst <=s src */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpLE64S, cc_dep1, cc_dep2));
+ }
if (isU64(cc_op, AMD64G_CC_OP_SUBQ) && isU64(cond, AMD64CondNLE)) {
/* long long sub/cmp, then NLE (signed greater than)
--> test !(dst <=s src)
/*---------------- SUBL ----------------*/
- /* 2, */
+ /* 0, 1 */
+ if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondO)) {
+ /* This is very commonly generated by JavaScript JITs, for
+ the idiom "do a 32-bit subtract and jump to out-of-line
+ code if an overflow occurs". */
+ /* long sub/cmp, then O (overflow)
+ --> ((dep1 ^ dep2) & (dep1 ^ (dep1 - dep2)))[31]
+ --> (((dep1 ^ dep2) & (dep1 ^ (dep1 -64 dep2))) >>u 31) & 1
+ */
+ vassert(isIRAtom(cc_dep1));
+ vassert(isIRAtom(cc_dep2));
+ return
+ binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_And64,
+ binop(Iop_Xor64, cc_dep1, cc_dep2),
+ binop(Iop_Xor64,
+ cc_dep1,
+ binop(Iop_Sub64, cc_dep1, cc_dep2))),
+ mkU8(31)),
+ mkU64(1));
+ }
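+ /* Illustrative check (example values only): dep1 = 0x80000000
+    and dep2 = 1 in the low 32 bits, i.e. INT32_MIN - 1, which
+    overflows.  Then dep1 ^ dep2 = 0x80000001 and
+    dep1 ^ (dep1 - dep2) = 0x80000000 ^ 0x7FFFFFFF = 0xFFFFFFFF;
+    both have bit 31 set, so the expression evaluates to 1. */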
+ if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNO)) {
+ /* No action. Never yet found a test case. */
+ }
+
+ /* 2, 3 */
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondB)) {
/* long sub/cmp, then B (unsigned less than)
--> test dst <u src */
unop(Iop_64to32, cc_dep1),
unop(Iop_64to32, cc_dep2)));
}
+ if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNB)) {
+ /* long sub/cmp, then NB (unsigned greater than or equal)
+ --> test src <=u dst */
+ /* Note, args are opposite way round from the usual */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpLE32U,
+ unop(Iop_64to32, cc_dep2),
+ unop(Iop_64to32, cc_dep1)));
+ }
/* 4, 5 */
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondZ)) {
unop(Iop_64to32, cc_dep1)));
}
- /* 8, */
+ /* 8, 9 */
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondS)) {
- /* long sub/cmp, then S (negative) --> test (dst-src <s 0) */
- return unop(Iop_1Uto64,
- binop(Iop_CmpLT32S,
- binop(Iop_Sub32,
- unop(Iop_64to32, cc_dep1),
- unop(Iop_64to32, cc_dep2)),
- mkU32(0)));
+ /* long sub/cmp, then S (negative)
+ --> (dst-src)[31]
+ --> ((dst -64 src) >>u 31) & 1
+ Pointless to narrow the args to 32 bit before the subtract. */
+ return binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_Sub64, cc_dep1, cc_dep2),
+ mkU8(31)),
+ mkU64(1));
+ }
+ if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNS)) {
+ /* long sub/cmp, then NS (not negative)
+ --> (dst-src)[31] ^ 1
+ --> (((dst -64 src) >>u 31) & 1) ^ 1
+ Pointless to narrow the args to 32 bit before the subtract. */
+ return binop(Iop_Xor64,
+ binop(Iop_And64,
+ binop(Iop_Shr64,
+ binop(Iop_Sub64, cc_dep1, cc_dep2),
+ mkU8(31)),
+ mkU64(1)),
+ mkU64(1));
}
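+ /* Narrowing is unnecessary in the two cases above because
+    borrows in a subtract only propagate from low bits to high
+    bits: bits 31:0 of (dep1 -64 dep2) are exactly the 32-bit
+    difference of the truncated operands, so bit 31 is already
+    the sign of the 32-bit result. */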
- /* 12, */
+ /* 12, 13 */
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondL)) {
/* long sub/cmp, then L (signed less than)
--> test dst <s src */
unop(Iop_64to32, cc_dep1),
unop(Iop_64to32, cc_dep2)));
}
+ if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondNL)) {
+ /* long sub/cmp, then NL (signed greater than or equal)
+ --> test dst >=s src
+ --> test src <=s dst */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpLE32S,
+ unop(Iop_64to32, cc_dep2),
+ unop(Iop_64to32, cc_dep1)));
+ }
/* 14, 15 */
if (isU64(cc_op, AMD64G_CC_OP_SUBL) && isU64(cond, AMD64CondLE)) {
unop(Iop_64to16,cc_dep2)));
}
+ if (isU64(cc_op, AMD64G_CC_OP_SUBW) && isU64(cond, AMD64CondBE)) {
+ /* word sub/cmp, then BE (unsigned less than or equal)
+ --> test dst <=u src */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpLE64U,
+ binop(Iop_Shl64, cc_dep1, mkU8(48)),
+ binop(Iop_Shl64, cc_dep2, mkU8(48))));
+ }
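+ /* The Shl64 by 48 trick places the 16-bit operands in bits
+    63:48 and discards whatever the deps may carry above bit 15,
+    so an unsigned 64-bit compare of the shifted values gives the
+    same answer as an unsigned 16-bit compare of the originals,
+    without needing any narrowing ops. */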
+
if (isU64(cc_op, AMD64G_CC_OP_SUBW) && isU64(cond, AMD64CondLE)) {
/* word sub/cmp, then LE (signed less than or equal)
--> test dst <=s src */