Constify parameters of the LibVEX_Chain/Unchain/PatchProfInc.
git-svn-id: svn://svn.valgrind.org/vex/trunk@2956
emitInstr case for XDirect, above. */
VexInvalRange chainXDirect_AMD64 ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to )
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to )
{
vassert(endness_host == VexEndnessLE);
*/
/* This is the delta we need to put into a JMP d32 insn. It's
relative to the start of the next insn, hence the -5. */
- Long delta = (Long)((UChar*)place_to_jump_to - (UChar*)p) - (Long)5;
+ Long delta = (Long)((const UChar *)place_to_jump_to - (const UChar*)p) - 5;
Bool shortOK = delta >= -1000*1000*1000 && delta < 1000*1000*1000;
static UInt shortCTR = 0; /* DO NOT MAKE NON-STATIC */
emitInstr case for XDirect, above. */
VexInvalRange unchainXDirect_AMD64 ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me )
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me )
{
vassert(endness_host == VexEndnessLE);
/* It's the short form. Check the offset is right. */
Int s32 = *(Int*)(&p[1]);
Long s64 = (Long)s32;
- if ((UChar*)p + 5 + s64 == (UChar*)place_to_jump_to_EXPECTED) {
+ if ((UChar*)p + 5 + s64 == place_to_jump_to_EXPECTED) {
valid = True;
if (0)
vex_printf("QQQ unchainXDirect_AMD64: found short form\n");
created by the Ain_ProfInc case for emit_AMD64Instr. */
VexInvalRange patchProfInc_AMD64 ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter )
+ const ULong* location_of_counter )
{
vassert(endness_host == VexEndnessLE);
vassert(sizeof(ULong*) == 8);
/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_AMD64 ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to );
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to );
extern VexInvalRange unchainXDirect_AMD64 ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me );
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_AMD64 ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter );
+ const ULong* location_of_counter );
#endif /* ndef __VEX_HOST_AMD64_DEFS_H */
emitInstr case for XDirect, above. */
VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to )
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to )
{
vassert(endness_host == VexEndnessLE);
emitInstr case for XDirect, above. */
VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me )
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me )
{
vassert(endness_host == VexEndnessLE);
created by the ARM64in_ProfInc case for emit_ARM64Instr. */
VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter )
+ const ULong* location_of_counter )
{
vassert(sizeof(ULong*) == 8);
vassert(endness_host == VexEndnessLE);
/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to );
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to );
extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me );
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter );
+ const ULong* location_of_counter );
#endif /* ndef __VEX_HOST_ARM64_DEFS_H */
emitInstr case for XDirect, above. */
VexInvalRange chainXDirect_ARM ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to )
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to )
{
vassert(endness_host == VexEndnessLE);
/* This is the delta we need to put into a B insn. It's relative
to the start of the next-but-one insn, hence the -8. */
- Long delta = (Long)((UChar*)place_to_jump_to - (UChar*)p) - (Long)8;
+ Long delta = (Long)((const UChar *)place_to_jump_to - (const UChar*)p) - 8;
Bool shortOK = delta >= -30*1000*1000 && delta < 30*1000*1000;
vassert(0 == (delta & (Long)3));
emitInstr case for XDirect, above. */
VexInvalRange unchainXDirect_ARM ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me )
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me )
{
vassert(endness_host == VexEndnessLE);
/* It's the short form. Check the displacement is right. */
Int simm24 = p[0] & 0x00FFFFFF;
simm24 <<= 8; simm24 >>= 8;
- if ((UChar*)p + (simm24 << 2) + 8 == (UChar*)place_to_jump_to_EXPECTED) {
+ if ((UChar*)p + (simm24 << 2) + 8 == place_to_jump_to_EXPECTED) {
valid = True;
if (0)
vex_printf("QQQ unchainXDirect_ARM: found short form\n");
created by the ARMin_ProfInc case for emit_ARMInstr. */
VexInvalRange patchProfInc_ARM ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter )
+ const ULong* location_of_counter )
{
vassert(endness_host == VexEndnessLE);
vassert(sizeof(ULong*) == 4);
/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_ARM ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to );
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to );
extern VexInvalRange unchainXDirect_ARM ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me );
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_ARM ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter );
+ const ULong* location_of_counter );
#endif /* ndef __VEX_HOST_ARM_DEFS_H */
emitInstr case for XDirect, above. */
VexInvalRange chainXDirect_MIPS ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to,
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to,
Bool mode64 )
{
vassert(endness_host == VexEndnessLE || endness_host == VexEndnessBE);
emitInstr case for XDirect, above. */
VexInvalRange unchainXDirect_MIPS ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me,
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me,
Bool mode64 )
{
vassert(endness_host == VexEndnessLE || endness_host == VexEndnessBE);
created by the Min_ProfInc case for emit_MIPSInstr. */
VexInvalRange patchProfInc_MIPS ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter, Bool mode64 )
+ const ULong* location_of_counter,
+ Bool mode64 )
{
vassert(endness_host == VexEndnessLE || endness_host == VexEndnessBE);
if (mode64) {
/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_MIPS ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to,
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to,
Bool mode64 );
extern VexInvalRange unchainXDirect_MIPS ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me,
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me,
Bool mode64 );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_MIPS ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter,
+ const ULong* location_of_counter,
Bool mode64 );
#endif /* ndef __VEX_HOST_MIPS_DEFS_H */
emitInstr case for XDirect, above. */
VexInvalRange chainXDirect_PPC ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to,
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to,
Bool mode64 )
{
if (mode64) {
emitInstr case for XDirect, above. */
VexInvalRange unchainXDirect_PPC ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me,
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me,
Bool mode64 )
{
if (mode64) {
created by the Pin_ProfInc case for emit_PPCInstr. */
VexInvalRange patchProfInc_PPC ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter,
+ const ULong* location_of_counter,
Bool mode64 )
{
if (mode64) {
/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_PPC ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to,
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to,
Bool mode64 );
extern VexInvalRange unchainXDirect_PPC ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me,
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me,
Bool mode64 );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_PPC ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter,
+ const ULong* location_of_counter,
Bool mode64 );
generated by s390_insn_profinc_emit. */
VexInvalRange
patchProfInc_S390(VexEndness endness_host,
- void *code_to_patch, ULong *location_of_counter)
+ void *code_to_patch, const ULong *location_of_counter)
{
vassert(sizeof(ULong *) == 8);
VexInvalRange
chainXDirect_S390(VexEndness endness_host,
void *place_to_chain,
- void *disp_cp_chain_me_EXPECTED,
- void *place_to_jump_to)
+ const void *disp_cp_chain_me_EXPECTED,
+ const void *place_to_jump_to)
{
vassert(endness_host == VexEndnessBE);
/* This is the delta we need to put into a BRCL insn. Note, that the
offset in BRCL is in half-words. Hence division by 2. */
- Long delta = (Long)((UChar *)place_to_jump_to - (UChar *)place_to_chain) / 2;
+ Long delta =
+ (Long)((const UChar *)place_to_jump_to - (const UChar *)place_to_chain) / 2;
Bool shortOK = delta >= -1000*1000*1000 && delta < 1000*1000*1000;
static UInt shortCTR = 0; /* DO NOT MAKE NON-STATIC */
VexInvalRange
unchainXDirect_S390(VexEndness endness_host,
void *place_to_unchain,
- void *place_to_jump_to_EXPECTED,
- void *disp_cp_chain_me)
+ const void *place_to_jump_to_EXPECTED,
+ const void *disp_cp_chain_me)
{
vassert(endness_host == VexEndnessBE);
/* Perform a chaining and unchaining of an XDirect jump. */
VexInvalRange chainXDirect_S390(VexEndness endness_host,
void *place_to_chain,
- void *disp_cp_chain_me_EXPECTED,
- void *place_to_jump_to);
+ const void *disp_cp_chain_me_EXPECTED,
+ const void *place_to_jump_to);
VexInvalRange unchainXDirect_S390(VexEndness endness_host,
void *place_to_unchain,
- void *place_to_jump_to_EXPECTED,
- void *disp_cp_chain_me);
+ const void *place_to_jump_to_EXPECTED,
+ const void *disp_cp_chain_me);
/* Patch the counter location into an existing ProfInc point. */
VexInvalRange patchProfInc_S390(VexEndness endness_host,
void *code_to_patch,
- ULong *location_of_counter);
+ const ULong *location_of_counter);
/* KLUDGE: See detailled comment in host_s390_defs.c. */
extern UInt s390_host_hwcaps;
emitInstr case for XDirect, above. */
VexInvalRange chainXDirect_X86 ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to )
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to )
{
vassert(endness_host == VexEndnessLE);
*/
/* This is the delta we need to put into a JMP d32 insn. It's
relative to the start of the next insn, hence the -5. */
- Long delta = (Long)((UChar*)place_to_jump_to - (UChar*)p) - (Long)5;
+ Long delta = (Long)((const UChar *)place_to_jump_to - p) - 5;
/* And make the modifications. */
p[0] = 0xE9;
emitInstr case for XDirect, above. */
VexInvalRange unchainXDirect_X86 ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me )
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me )
{
vassert(endness_host == VexEndnessLE);
&& p[5] == 0x0F && p[6] == 0x0B) {
/* Check the offset is right. */
Int s32 = *(Int*)(&p[1]);
- if ((UChar*)p + 5 + s32 == (UChar*)place_to_jump_to_EXPECTED) {
+ if ((UChar*)p + 5 + s32 == place_to_jump_to_EXPECTED) {
valid = True;
if (0)
vex_printf("QQQ unchainXDirect_X86: found valid\n");
created by the Xin_ProfInc case for emit_X86Instr. */
VexInvalRange patchProfInc_X86 ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter )
+ const ULong* location_of_counter )
{
vassert(endness_host == VexEndnessLE);
vassert(sizeof(ULong*) == 4);
/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_X86 ( VexEndness endness_host,
void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to );
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to );
extern VexInvalRange unchainXDirect_X86 ( VexEndness endness_host,
void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me );
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_X86 ( VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter );
+ const ULong* location_of_counter );
#endif /* ndef __VEX_HOST_X86_DEFS_H */
/* --------- Chain/Unchain XDirects. --------- */
-VexInvalRange LibVEX_Chain ( VexArch arch_host,
- VexEndness endness_host,
- void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to )
+VexInvalRange LibVEX_Chain ( VexArch arch_host,
+ VexEndness endness_host,
+ void* place_to_chain,
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to )
{
- VexInvalRange (*chainXDirect)(VexEndness, void*, void*, void*) = NULL;
switch (arch_host) {
case VexArchX86:
- chainXDirect = chainXDirect_X86; break;
+ return chainXDirect_X86(endness_host,
+ place_to_chain,
+ disp_cp_chain_me_EXPECTED,
+ place_to_jump_to);
case VexArchAMD64:
- chainXDirect = chainXDirect_AMD64; break;
+ return chainXDirect_AMD64(endness_host,
+ place_to_chain,
+ disp_cp_chain_me_EXPECTED,
+ place_to_jump_to);
case VexArchARM:
- chainXDirect = chainXDirect_ARM; break;
+ return chainXDirect_ARM(endness_host,
+ place_to_chain,
+ disp_cp_chain_me_EXPECTED,
+ place_to_jump_to);
case VexArchARM64:
- chainXDirect = chainXDirect_ARM64; break;
+ return chainXDirect_ARM64(endness_host,
+ place_to_chain,
+ disp_cp_chain_me_EXPECTED,
+ place_to_jump_to);
case VexArchS390X:
- chainXDirect = chainXDirect_S390; break;
+ return chainXDirect_S390(endness_host,
+ place_to_chain,
+ disp_cp_chain_me_EXPECTED,
+ place_to_jump_to);
case VexArchPPC32:
return chainXDirect_PPC(endness_host,
place_to_chain,
default:
vassert(0);
}
- vassert(chainXDirect);
- VexInvalRange vir
- = chainXDirect(endness_host, place_to_chain,
- disp_cp_chain_me_EXPECTED, place_to_jump_to);
- return vir;
}
-VexInvalRange LibVEX_UnChain ( VexArch arch_host,
- VexEndness endness_host,
- void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me )
+VexInvalRange LibVEX_UnChain ( VexArch arch_host,
+ VexEndness endness_host,
+ void* place_to_unchain,
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me )
{
- VexInvalRange (*unchainXDirect)(VexEndness, void*, void*, void*) = NULL;
switch (arch_host) {
case VexArchX86:
- unchainXDirect = unchainXDirect_X86; break;
+ return unchainXDirect_X86(endness_host,
+ place_to_unchain,
+ place_to_jump_to_EXPECTED,
+ disp_cp_chain_me);
case VexArchAMD64:
- unchainXDirect = unchainXDirect_AMD64; break;
+ return unchainXDirect_AMD64(endness_host,
+ place_to_unchain,
+ place_to_jump_to_EXPECTED,
+ disp_cp_chain_me);
case VexArchARM:
- unchainXDirect = unchainXDirect_ARM; break;
+ return unchainXDirect_ARM(endness_host,
+ place_to_unchain,
+ place_to_jump_to_EXPECTED,
+ disp_cp_chain_me);
case VexArchARM64:
- unchainXDirect = unchainXDirect_ARM64; break;
+ return unchainXDirect_ARM64(endness_host,
+ place_to_unchain,
+ place_to_jump_to_EXPECTED,
+ disp_cp_chain_me);
case VexArchS390X:
- unchainXDirect = unchainXDirect_S390; break;
+ return unchainXDirect_S390(endness_host,
+ place_to_unchain,
+ place_to_jump_to_EXPECTED,
+ disp_cp_chain_me);
case VexArchPPC32:
return unchainXDirect_PPC(endness_host,
place_to_unchain,
default:
vassert(0);
}
- vassert(unchainXDirect);
- VexInvalRange vir
- = unchainXDirect(endness_host, place_to_unchain,
- place_to_jump_to_EXPECTED, disp_cp_chain_me);
- return vir;
}
Int LibVEX_evCheckSzB ( VexArch arch_host,
VexInvalRange LibVEX_PatchProfInc ( VexArch arch_host,
VexEndness endness_host,
void* place_to_patch,
- ULong* location_of_counter )
+ const ULong* location_of_counter )
{
- VexInvalRange (*patchProfInc)(VexEndness,void*,ULong*) = NULL;
switch (arch_host) {
case VexArchX86:
- patchProfInc = patchProfInc_X86; break;
+ return patchProfInc_X86(endness_host, place_to_patch,
+ location_of_counter);
case VexArchAMD64:
- patchProfInc = patchProfInc_AMD64; break;
+ return patchProfInc_AMD64(endness_host, place_to_patch,
+ location_of_counter);
case VexArchARM:
- patchProfInc = patchProfInc_ARM; break;
+ return patchProfInc_ARM(endness_host, place_to_patch,
+ location_of_counter);
case VexArchARM64:
- patchProfInc = patchProfInc_ARM64; break;
+ return patchProfInc_ARM64(endness_host, place_to_patch,
+ location_of_counter);
case VexArchS390X:
- patchProfInc = patchProfInc_S390; break;
+ return patchProfInc_S390(endness_host, place_to_patch,
+ location_of_counter);
case VexArchPPC32:
return patchProfInc_PPC(endness_host, place_to_patch,
location_of_counter, False/*!mode64*/);
default:
vassert(0);
}
- vassert(patchProfInc);
- VexInvalRange vir
- = patchProfInc(endness_host, place_to_patch, location_of_counter);
- return vir;
}
currently contains a call to the dispatcher specified by
disp_cp_chain_me_EXPECTED. */
extern
-VexInvalRange LibVEX_Chain ( VexArch arch_host,
- VexEndness endhess_host,
- void* place_to_chain,
- void* disp_cp_chain_me_EXPECTED,
- void* place_to_jump_to );
+VexInvalRange LibVEX_Chain ( VexArch arch_host,
+                             VexEndness endness_host,
+ void* place_to_chain,
+ const void* disp_cp_chain_me_EXPECTED,
+ const void* place_to_jump_to );
/* Undo an XDirect jump located at place_to_unchain, so it is
converted back into a call to disp_cp_chain_me. It is expected
(and checked) that this site currently contains a jump directly to
the address specified by place_to_jump_to_EXPECTED. */
extern
-VexInvalRange LibVEX_UnChain ( VexArch arch_host,
- VexEndness endness_host,
- void* place_to_unchain,
- void* place_to_jump_to_EXPECTED,
- void* disp_cp_chain_me );
+VexInvalRange LibVEX_UnChain ( VexArch arch_host,
+ VexEndness endness_host,
+ void* place_to_unchain,
+ const void* place_to_jump_to_EXPECTED,
+ const void* disp_cp_chain_me );
/* Returns a constant -- the size of the event check that is put at
the start of every translation. This makes it possible to
/* Patch the counter location into an existing ProfInc point. The
specified point is checked to make sure it is plausible. */
extern
-VexInvalRange LibVEX_PatchProfInc ( VexArch arch_host,
- VexEndness endness_host,
- void* place_to_patch,
- ULong* location_of_counter );
+VexInvalRange LibVEX_PatchProfInc ( VexArch arch_host,
+ VexEndness endness_host,
+ void* place_to_patch,
+ const ULong* location_of_counter );
/*-------------------------------------------------------*/