From 7f2ecef135193eb22669e8bcfaa4eb8aef93e9db Mon Sep 17 00:00:00 2001
From: Julian Seward
Date: Thu, 7 Jul 2005 12:26:36 +0000
Subject: [PATCH] Tidy up some loose ends in the self-checking-translations
 machinery, and unroll the adler32 loop in a not-very-successful attempt to
 reduce the overhead of checking.

git-svn-id: svn://svn.valgrind.org/vex/trunk@1264
---
 VEX/priv/guest-amd64/ghelpers.c   |  6 +++--
 VEX/priv/guest-generic/bb_to_IR.c | 38 +++++++++++++++++++------------
 VEX/priv/guest-x86/ghelpers.c     |  6 +++--
 VEX/pub/libvex.h                  |  2 +-
 4 files changed, 32 insertions(+), 20 deletions(-)

diff --git a/VEX/priv/guest-amd64/ghelpers.c b/VEX/priv/guest-amd64/ghelpers.c
index 24c5ed0a54..d9226d004d 100644
--- a/VEX/priv/guest-amd64/ghelpers.c
+++ b/VEX/priv/guest-amd64/ghelpers.c
@@ -1729,7 +1729,7 @@ VexGuestLayout
 
           /* Describe any sections to be regarded by Memcheck as
              'always-defined'. */
-          .n_alwaysDefd = 12,
+          .n_alwaysDefd = 14,
 
           /* flags thunk: OP and NDEP are always defd, whereas DEP1
              and DEP2 have to be tracked.  See detailed comment in
@@ -1754,7 +1754,9 @@ VexGuestLayout
                  //         /*    */ ALWAYSDEFD(guest_LDT),
                  //         /*    */ ALWAYSDEFD(guest_GDT),
                  /* 10 */ ALWAYSDEFD(guest_EMWARN),
-                 /* 11 */ ALWAYSDEFD(guest_SSEROUND)
+                 /* 11 */ ALWAYSDEFD(guest_SSEROUND),
+                 /* 12 */ ALWAYSDEFD(guest_TISTART),
+                 /* 13 */ ALWAYSDEFD(guest_TILEN)
                }
         };
 
diff --git a/VEX/priv/guest-generic/bb_to_IR.c b/VEX/priv/guest-generic/bb_to_IR.c
index 2263580441..228d1f6b0f 100644
--- a/VEX/priv/guest-generic/bb_to_IR.c
+++ b/VEX/priv/guest-generic/bb_to_IR.c
@@ -42,6 +42,7 @@
 
 
 /* Forwards .. */
+__attribute((regparm(2)))
 static UInt genericg_compute_adler32 ( HWord addr, UInt len );
 
 
@@ -119,7 +120,7 @@ IRBB* bb_to_IR ( /*OUT*/VexGuestExtents* vge,
    delta    = 0;
    n_instrs = 0;
 
-   /* If asked to make a self-checking translation, leave a 3 spaces
+   /* If asked to make a self-checking translation, leave a 5 spaces
       in which to put the check statements.  We'll fill them in later
       when we know the length and adler32 of the area to check. */
    if (do_self_check) {
@@ -127,6 +128,8 @@ IRBB* bb_to_IR ( /*OUT*/VexGuestExtents* vge,
       addStmtToIRBB( irbb, IRStmt_NoOp() );
       addStmtToIRBB( irbb, IRStmt_NoOp() );
       addStmtToIRBB( irbb, IRStmt_NoOp() );
+      addStmtToIRBB( irbb, IRStmt_NoOp() );
+      addStmtToIRBB( irbb, IRStmt_NoOp() );
    }
 
    /* Process instructions. */
@@ -285,6 +288,7 @@ IRBB* bb_to_IR ( /*OUT*/VexGuestExtents* vge,
 
       UInt     len2check, adler32;
       IRConst* guest_IP_bbstart_IRConst;
+      IRTemp   tistart_tmp, tilen_tmp;
 
       vassert(vge->n_used == 1);
       len2check = vge->len[0];
@@ -295,29 +299,39 @@
 
      guest_IP_bbstart_IRConst
         = guest_word_type==Ity_I32
-             ? IRConst_U32(guest_IP_bbstart)
+             ? IRConst_U32(toUInt(guest_IP_bbstart))
             : IRConst_U64(guest_IP_bbstart);
 
      /* Set TISTART and TILEN.  These will describe to the despatcher
         the area of guest code to invalidate should we exit with a
        self-check failure. */
 
+     tistart_tmp = newIRTemp(irbb->tyenv, guest_word_type);
+     tilen_tmp   = newIRTemp(irbb->tyenv, guest_word_type);
+
      irbb->stmts[selfcheck_idx+0]
-        = IRStmt_Put( offB_TILEN,
-                      guest_word_type==Ity_I32
-                         ? IRExpr_Const(IRConst_U32(len2check))
-                         : IRExpr_Const(IRConst_U64(len2check)) );
+        = IRStmt_Tmp(tistart_tmp, IRExpr_Const(guest_IP_bbstart_IRConst) );
 
      irbb->stmts[selfcheck_idx+1]
-        = IRStmt_Put( offB_TISTART, IRExpr_Const(guest_IP_bbstart_IRConst) );
+        = IRStmt_Tmp(tilen_tmp,
+                     guest_word_type==Ity_I32
+                        ? IRExpr_Const(IRConst_U32(len2check))
+                        : IRExpr_Const(IRConst_U64(len2check))
+          );
 
      irbb->stmts[selfcheck_idx+2]
+        = IRStmt_Put( offB_TISTART, IRExpr_Tmp(tistart_tmp) );
+
+     irbb->stmts[selfcheck_idx+3]
+        = IRStmt_Put( offB_TILEN, IRExpr_Tmp(tilen_tmp) );
+
+     irbb->stmts[selfcheck_idx+4]
        = IRStmt_Exit(
            IRExpr_Binop(
              Iop_CmpNE32,
              mkIRExprCCall(
                Ity_I32,
-               0/*regparms*/,
+               2/*regparms*/,
                "genericg_compute_adler32",
                &genericg_compute_adler32,
                mkIRExprVec_2(
@@ -350,17 +364,12 @@
    get anywhere near that many bytes to deal with.  This fn is
    called once for every use of a self-checking translation, so
    it needs to be as fast as possible. */
+__attribute((regparm(2)))
 static UInt genericg_compute_adler32 ( HWord addr, UInt len )
 {
-   UInt   i;
    UInt   s1 = 1;
    UInt   s2 = 0;
    UChar* buf = (UChar*)addr;
-   for (i = 0; i < len; i++) {
-      s1 += (UInt)buf[i];
-      s2 += s1;
-   }
-#if 0
    while (len >= 4) {
       s1 += buf[0];
       s2 += s1;
@@ -379,7 +388,6 @@ static UInt genericg_compute_adler32 ( HWord addr, UInt len )
       len--;
       buf++;
    }
-#endif
    return (s2 << 16) + s1;
 }
 
diff --git a/VEX/priv/guest-x86/ghelpers.c b/VEX/priv/guest-x86/ghelpers.c
index f64b44574e..7d8cce36d3 100644
--- a/VEX/priv/guest-x86/ghelpers.c
+++ b/VEX/priv/guest-x86/ghelpers.c
@@ -2061,7 +2061,7 @@ VexGuestLayout
 
           /* Describe any sections to be regarded by Memcheck as
              'always-defined'. */
-          .n_alwaysDefd = 19,
+          .n_alwaysDefd = 21,
 
           /* flags thunk: OP and NDEP are always defd, whereas DEP1
              and DEP2 have to be tracked.  See detailed comment in
@@ -2085,7 +2085,9 @@ VexGuestLayout
                  /* 15 */ ALWAYSDEFD(guest_LDT),
                  /* 16 */ ALWAYSDEFD(guest_GDT),
                  /* 17 */ ALWAYSDEFD(guest_EMWARN),
-                 /* 18 */ ALWAYSDEFD(guest_SSEROUND)
+                 /* 18 */ ALWAYSDEFD(guest_SSEROUND),
+                 /* 19 */ ALWAYSDEFD(guest_TISTART),
+                 /* 20 */ ALWAYSDEFD(guest_TILEN)
                }
         };
 
diff --git a/VEX/pub/libvex.h b/VEX/pub/libvex.h
index ab405016ad..3e07fafb96 100644
--- a/VEX/pub/libvex.h
+++ b/VEX/pub/libvex.h
@@ -171,7 +171,7 @@ extern void LibVEX_ShowAllocStats ( void );
 
 /* The max number of guest state chunks which we can describe as
    always defined (for the benefit of Memcheck). */
-#define VEXGLO_N_ALWAYSDEFD  19
+#define VEXGLO_N_ALWAYSDEFD  21
 
 typedef
    struct {
-- 
2.47.3
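
For reference, the following standalone sketch (not part of the patch) contrasts the unrolled
checksum loop this commit switches to with the plain byte-at-a-time loop it replaces, and checks
that the two produce the same value. The names adler32_simple and adler32_unrolled are
illustrative only; like the VEX helper, neither applies the mod-65521 reduction of full Adler-32,
which is acceptable for the short guest-code regions being checksummed.

#include <stdio.h>
#include <string.h>

typedef unsigned int  UInt;
typedef unsigned char UChar;
typedef unsigned long HWord;

/* Reference version: one byte per iteration (the loop the patch removes). */
static UInt adler32_simple ( HWord addr, UInt len )
{
   UInt i, s1 = 1, s2 = 0;
   UChar* buf = (UChar*)addr;
   for (i = 0; i < len; i++) {
      s1 += (UInt)buf[i];
      s2 += s1;
   }
   return (s2 << 16) + s1;
}

/* Unrolled-by-four version, mirroring the loop the patch enables. */
static UInt adler32_unrolled ( HWord addr, UInt len )
{
   UInt s1 = 1, s2 = 0;
   UChar* buf = (UChar*)addr;
   while (len >= 4) {
      s1 += buf[0];  s2 += s1;
      s1 += buf[1];  s2 += s1;
      s1 += buf[2];  s2 += s1;
      s1 += buf[3];  s2 += s1;
      buf += 4;
      len -= 4;
   }
   while (len > 0) {
      s1 += buf[0];  s2 += s1;
      len--;
      buf++;
   }
   return (s2 << 16) + s1;
}

int main ( void )
{
   const char* bytes = "some guest code bytes to checksum";
   UInt a = adler32_simple  ( (HWord)bytes, (UInt)strlen(bytes) );
   UInt b = adler32_unrolled( (HWord)bytes, (UInt)strlen(bytes) );
   printf("simple   = %08x\nunrolled = %08x\n", a, b);
   return a == b ? 0 : 1;
}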