);
#endif /* VM_X86_ANY */
#elif defined _MSC_VER
-#if defined __x86_64__ || defined VM_ARM_32
_InterlockedAnd((long *)&var->value, (long)val);
-#else
- __asm mov eax, val
- __asm mov ebx, var
- __asm lock And [ebx]Atomic_uint32.value, eax
-#endif
#else
#error No compiler defined for Atomic_And
#endif
);
#endif /* VM_X86_ANY */
#elif defined _MSC_VER
-#if defined __x86_64__ || defined VM_ARM_32
_InterlockedOr((long *)&var->value, (long)val);
-#else
- __asm mov eax, val
- __asm mov ebx, var
- __asm lock Or [ebx]Atomic_uint32.value, eax
-#endif
#else
#error No compiler defined for Atomic_Or
#endif
);
#endif /* VM_X86_ANY */
#elif defined _MSC_VER
-#if defined __x86_64__ || defined VM_ARM_32
_InterlockedXor((long *)&var->value, (long)val);
-#else
- __asm mov eax, val
- __asm mov ebx, var
- __asm lock Xor [ebx]Atomic_uint32.value, eax
-#endif
#else
#error No compiler defined for Atomic_Xor
#endif
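/*
 * Illustrative sketch, not part of the patch: the three hunks above all make
 * the same change, dropping the 32-bit-only MSVC inline assembly in favor of
 * the _InterlockedAnd/Or/Xor intrinsics, which MSVC provides on x86, x64 and
 * ARM alike. DemoAtomic and DemoBitOps below are made-up names.
 */
#include <intrin.h>

typedef struct DemoAtomic { volatile long value; } DemoAtomic;

static void
DemoBitOps(DemoAtomic *var)
{
   _InterlockedAnd(&var->value, 0x0F);   /* atomically: value &= 0x0F */
   _InterlockedOr(&var->value, 0x10);    /* atomically: value |= 0x10 */
   _InterlockedXor(&var->value, 0xFF);   /* atomically: value ^= 0xFF */
}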
*/
return var->value;
#elif defined _MSC_VER && defined VM_ARM_32
+ /* MSVC + 32-bit ARM has add64 but no cmpxchg64 */
return _InterlockedAdd64((__int64 *)&var->value, 0);
#elif defined _MSC_VER && defined __i386__
-# pragma warning(push)
-# pragma warning(disable : 4035) // disable no-return warning
- {
- __asm mov ecx, var
- __asm mov edx, ecx
- __asm mov eax, ebx
- __asm lock cmpxchg8b [ecx]
- // edx:eax is the return value; this is documented to work. --mann
- }
-# pragma warning(pop)
+ /* MSVC + 32-bit x86 has cmpxchg64 but no add64 */
+ return _InterlockedCompareExchange64((__int64 *)&var->value,
+                                        (__int64)255, // Unlikely value, to
+                                        (__int64)255); // avoid dirtying the cache
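/*
 * Illustrative sketch, not part of the patch: on 32-bit x86,
 * _InterlockedCompareExchange64 compiles to lock cmpxchg8b, which reads all
 * eight bytes atomically and returns the old value whether or not the compare
 * succeeds. Passing the same comparand and exchange value therefore turns it
 * into an atomic 64-bit load; 255 is just a value the variable is unlikely to
 * hold, so the compare normally fails and no write dirties the cache line.
 * DemoAtomicLoad64 is a made-up name.
 */
static __int64
DemoAtomicLoad64(volatile __int64 *p)
{
   return _InterlockedCompareExchange64(p, 255, 255);
}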
#elif defined __GNUC__ && defined VM_ARM_V7
__asm__ __volatile__(
"ldrexd %[value], %H[value], [%[var]] \n\t"
static INLINE void *
uint16set(void *dst, uint16 val, size_t count)
{
-#ifdef VM_X86_64
+#ifdef VM_X86_ANY
__stosw((uint16*)dst, val, count);
-#elif defined(VM_ARM_32)
+#else
size_t i;
for (i = 0; i < count; i++) {
((uint16 *)dst)[i] = val;
}
-#else
- __asm { pushf;
- mov ax, val;
- mov ecx, count;
- mov edi, dst;
- cld;
- rep stosw;
- popf;
- }
#endif
return dst;
}
static INLINE void *
uint32set(void *dst, uint32 val, size_t count)
{
-#ifdef VM_X86_64
+#ifdef VM_X86_ANY
__stosd((unsigned long*)dst, (unsigned long)val, count);
-#elif defined(VM_ARM_32)
+#else
size_t i;
for (i = 0; i < count; i++) {
((uint32 *)dst)[i] = val;
}
-#else
- __asm { pushf;
- mov eax, val;
- mov ecx, count;
- mov edi, dst;
- cld;
- rep stosd;
- popf;
- }
#endif
return dst;
}
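/*
 * Illustrative sketch, not part of the patch: __stosw and __stosd are the
 * intrinsic forms of rep stosw / rep stosd, and MSVC provides them on 32-bit
 * as well as 64-bit x86, which is why both guards widen from VM_X86_64 to
 * VM_X86_ANY. DemoFill16 is a made-up name.
 */
#include <intrin.h>
#include <stddef.h>

static void
DemoFill16(unsigned short *dst, unsigned short val, size_t count)
{
   __stosw(dst, val, count);   /* same effect as: while (count--) *dst++ = val; */
}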
#endif
}
#elif defined(_MSC_VER)
-#ifdef VM_X86_64
{
_mm_pause();
}
-#else /* VM_X86_64 */
-#pragma warning( disable : 4035)
-{
- __asm _emit 0xf3 __asm _emit 0x90
-}
-#pragma warning (default: 4035)
-#endif /* VM_X86_64 */
#else /* __GNUC__ */
#error No compiler defined for PAUSE
#endif
#endif
}
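/*
 * Illustrative sketch, not part of the patch: the emitted bytes 0xF3 0x90 are
 * exactly the PAUSE instruction (REP NOP), which is what _mm_pause() produces,
 * so the intrinsic covers 32-bit and 64-bit builds alike. A typical spin-wait,
 * with made-up names:
 */
#include <intrin.h>

static void
DemoSpinWait(volatile long *flag)
{
   while (*flag == 0) {
      _mm_pause();   /* hint that this is a spin loop, easing power use and
                        exit mis-speculation */
   }
}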
#elif defined(_MSC_VER)
-#ifdef VM_X86_64
+#ifdef VM_X86_ANY
{
return __rdtsc();
}
-#elif defined(VM_ARM_32)
+#else
{
/*
    * We need to do more investigation here to find
NOT_IMPLEMENTED();
return 0;
}
-#else
-#pragma warning( disable : 4035)
-{
- __asm _emit 0x0f __asm _emit 0x31
-}
-#pragma warning (default: 4035)
-#endif /* VM_X86_64 */
+#endif /* VM_X86_ANY */
#else /* __GNUC__ */
#error No compiler defined for RDTSC
#endif /* __GNUC__ */
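/*
 * Illustrative sketch, not part of the patch: MSVC provides __rdtsc() on both
 * 32-bit and 64-bit x86 (hence VM_X86_ANY); it returns the 64-bit timestamp
 * counter that the removed rdtsc bytes (0x0F 0x31) left split across edx:eax.
 * DemoCycleCount is a made-up name.
 */
#include <intrin.h>

static unsigned __int64
DemoCycleCount(void)
{
   unsigned __int64 start = __rdtsc();
   /* ... code being timed ... */
   return __rdtsc() - start;
}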
#define OUTW(port, val) __GCC_OUT(w, w, port, val)
#define OUT32(port, val) __GCC_OUT(l, , port, val)
-#define GET_CURRENT_EIP(_eip) \
- __asm__ __volatile("call 0\n\tpopl %0" : "=r" (_eip): );
-
static INLINE unsigned int
GetCallerEFlags(void)
{
#undef NEAR
#endif
-#define GET_CURRENT_EIP(_eip) do { \
- __asm call NEAR PTR $+5 \
- __asm pop eax \
- __asm mov _eip, eax \
-} while (0)
#endif // VM_X86_64
static INLINE unsigned int
);
#elif defined _MSC_VER
_ReadWriteBarrier();
-#if defined VM_X86_32
- __asm sfence;
-#else
_mm_sfence();
-#endif
_ReadWriteBarrier();
#else
#error No compiler defined for SFENCE
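/*
 * Illustrative sketch, not part of the patch: _mm_sfence() emits the same
 * sfence instruction as the removed __asm, and unlike the inline assembly it
 * also exists on 64-bit MSVC. A typical pairing with a non-temporal store,
 * using made-up names:
 */
#include <emmintrin.h>

static void
DemoStreamStore(int *dst, int val)
{
   _mm_stream_si32(dst, val);   /* non-temporal store, bypasses the cache */
   _mm_sfence();                /* order it before any subsequent stores */
}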