git.ipfire.org Git - thirdparty/valgrind.git/commitdiff
amd64: add spec rules for: S/NS after ADDL, S after ADDQ.
author Julian Seward <jseward@acm.org>
Thu, 23 Sep 2021 13:46:21 +0000 (15:46 +0200)
committer Julian Seward <jseward@acm.org>
Thu, 23 Sep 2021 13:46:21 +0000 (15:46 +0200)
VEX/priv/guest_amd64_helpers.c

index af2ddc29c547772dc3677e9b11aa7cd2d8d5216c..9d61e7a0fd238d1bc2157cbc97508c77671a3c7c 100644 (file)
@@ -1092,6 +1092,7 @@ IRExpr* guest_amd64_spechelper ( const HChar* function_name,
 
       /*---------------- ADDQ ----------------*/
 
+      /* 4, */
       if (isU64(cc_op, AMD64G_CC_OP_ADDQ) && isU64(cond, AMD64CondZ)) {
          /* long long add, then Z --> test (dst+src == 0) */
          return unop(Iop_1Uto64,
@@ -1100,8 +1101,22 @@ IRExpr* guest_amd64_spechelper ( const HChar* function_name,
                            mkU64(0)));
       }
 
+      /* 8, */
+      if (isU64(cc_op, AMD64G_CC_OP_ADDQ) && isU64(cond, AMD64CondS)) {
+         /* long long add, then S (negative)
+            --> (dst+src)[63]
+            --> ((dst + src) >>u 63) & 1
+         */
+         return binop(Iop_And64,
+                      binop(Iop_Shr64,
+                            binop(Iop_Add64, cc_dep1, cc_dep2),
+                            mkU8(63)),
+                      mkU64(1));
+      }
+
       /*---------------- ADDL ----------------*/
 
+      /* 0, */
       if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondO)) {
          /* This is very commonly generated by Javascript JITs, for
             the idiom "do a 32-bit add and jump to out-of-line code if
@@ -1127,6 +1142,32 @@ IRExpr* guest_amd64_spechelper ( const HChar* function_name,
 
       }
 
+      /* 8, 9 */
+      if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondS)) {
+         /* long add, then S (negative)
+            --> (dst+src)[31]
+            --> ((dst +64 src) >>u 31) & 1
+            Pointless to narrow the args to 32 bit before the add. */
+         return binop(Iop_And64,
+                      binop(Iop_Shr64,
+                            binop(Iop_Add64, cc_dep1, cc_dep2),
+                            mkU8(31)),
+                      mkU64(1));
+      }
+      if (isU64(cc_op, AMD64G_CC_OP_ADDL) && isU64(cond, AMD64CondNS)) {
+         /* long add, then NS (not negative)
+            --> (dst+src)[31] ^ 1
+            --> (((dst +64 src) >>u 31) & 1) ^ 1
+            Pointless to narrow the args to 32 bit before the add. */
+         return binop(Iop_Xor64,
+                      binop(Iop_And64,
+                            binop(Iop_Shr64,
+                                  binop(Iop_Add64, cc_dep1, cc_dep2),
+                                  mkU8(31)),
+                            mkU64(1)),
+                      mkU64(1));
+      }
+
       /*---------------- SUBQ ----------------*/
 
       /* 0, */