/*********************************************************
- * Copyright (c) 2003-2023 VMware, Inc. All rights reserved.
+ * Copyright (c) 2003-2024 VMware, Inc. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation version 2.1 and no later version.
 * Function  Finds   (Type;Arch)         Range  Zero input
 * lssbPtr_0 LSB set (uintptr_t;32-bit) 0..31 -1
 * lssbPtr_0 LSB set (uintptr_t;64-bit) 0..63 -1
* mssbPtr_0 MSB set (uintptr_t;32-bit) 0..31 -1
* mssbPtr_0 MSB set (uintptr_t;64-bit) 0..63 -1
- * lssbPtr LSB set (uintptr_t;32-bit) 1..32 0
- * lssbPtr LSB set (uintptr_t;64-bit) 1..64 0
- * mssbPtr MSB set (uintptr_t;32-bit) 1..32 0
- * mssbPtr MSB set (uintptr_t;64-bit) 1..64 0
* lssb32 LSB set (uint32) 1..32 0
* mssb32 MSB set (uint32) 1..32 0
* lssb64 LSB set (uint64) 1..64 0
* mssb64 MSB set (uint64) 1..64 0
+ * lssbPtr LSB set (uintptr_t;32-bit) 1..32 0
+ * lssbPtr LSB set (uintptr_t;64-bit) 1..64 0
+ * mssbPtr MSB set (uintptr_t;32-bit) 1..32 0
+ * mssbPtr MSB set (uintptr_t;64-bit) 1..64 0
*/
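/*
 * Usage sketch (editor's illustration, not part of the header): how the
 * 0-based *_0 forms relate to the 1-based forms. Assumes <assert.h>.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
BitScanExample(void)
{
   assert(lssb32_0(0x28) == 3);   /* 0x28 = 101000b: lowest set bit is bit 3 */
   assert(mssb32_0(0x28) == 5);   /* highest set bit is bit 5 */
   assert(lssb32(0x28) == 4);     /* 1-based form: 0-based index + 1 */
   assert(lssb32_0(0) == -1);     /* zero input: -1 from the *_0 forms */
   assert(lssb32(0) == 0);        /* zero input: 0 from the 1-based forms */
}
#endif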
#if defined(_MSC_VER) && !defined(__clang__) // Clang defines _MSC_VER on Windows
-static INLINE int
+static inline int
lssb32_0(const uint32 value)
{
   unsigned long idx;

   if (UNLIKELY(value == 0)) {
      return -1;
   }
   _BitScanForward(&idx, (unsigned long)value);
   return (int)idx;
}
-static INLINE int
+static inline int
mssb32_0(const uint32 value)
{
   unsigned long idx;

   if (UNLIKELY(value == 0)) {
      return -1;
   }
   _BitScanReverse(&idx, (unsigned long)value);
   return (int)idx;
}
-static INLINE int
+static inline int
lssb64_0(const uint64 value)
{
   if (UNLIKELY(value == 0)) {
      return -1;
   } else {
      unsigned long idx;

      _BitScanForward64(&idx, (unsigned __int64)value);   /* 64-bit MSVC targets */
      return (int)idx;
   }
}
-static INLINE int
+static inline int
mssb64_0(const uint64 value)
{
   if (UNLIKELY(value == 0)) {
      return -1;
   } else {
      unsigned long idx;

      _BitScanReverse64(&idx, (unsigned __int64)value);   /* 64-bit MSVC targets */
      return (int)idx;
   }
}
#elif defined(__GNUC__)
-static INLINE int
+static inline int
lssb32_0(uint32 v)
{
int value = (int)v;
return __builtin_ffs(value) - 1;
}
-static INLINE int
+static inline int
mssb32_0(uint32 value)
{
   /*
    * __builtin_clz(0) is undefined, so the zero case must be checked first.
    */
   if (UNLIKELY(value == 0)) {
      return -1;
   }
   return 31 - __builtin_clz(value);
}
-static INLINE int
+static inline int
lssb64_0(const uint64 v)
{
   int64 value = (int64)v;

   return __builtin_ffsll(value) - 1;
}
-static INLINE int
+static inline int
mssb64_0(const uint64 value)
{
   if (UNLIKELY(value == 0)) {
      return -1;
   }
   return 63 - __builtin_clzll(value);
}
#endif // __GNUC__
-static INLINE int
+static inline int
lssbPtr_0(const uintptr_t value)
{
#ifdef VM_64BIT
   return lssb64_0((uint64)value);
#else
   return lssb32_0((uint32)value);
#endif
}
-static INLINE unsigned
+static inline unsigned
lssbPtr(const uintptr_t value)
{
return (unsigned)lssbPtr_0(value) + 1;
}
-static INLINE int
+static inline int
mssbPtr_0(const uintptr_t value)
{
#ifdef VM_64BIT
   return mssb64_0((uint64)value);
#else
   return mssb32_0((uint32)value);
#endif
}
-static INLINE unsigned
+static inline unsigned
mssbPtr(const uintptr_t value)
{
return (unsigned)mssbPtr_0(value) + 1;
}
-static INLINE unsigned
+static inline unsigned
lssb32(const uint32 value)
{
return (unsigned)lssb32_0(value) + 1;
}
-static INLINE unsigned
+static inline unsigned
mssb32(const uint32 value)
{
return (unsigned)mssb32_0(value) + 1;
}
-static INLINE unsigned
+static inline unsigned
lssb64(const uint64 value)
{
return (unsigned)lssb64_0(value) + 1;
}
-static INLINE unsigned
+static inline unsigned
mssb64(const uint64 value)
{
   return (unsigned)mssb64_0(value) + 1;
}
*-----------------------------------------------------------------------------
*/
-static INLINE uint16
+static inline uint16
Bswap16(uint16 v)
{
#if defined(VM_ARM_64) && !defined(_MSC_VER)
   __asm__("rev16 %w0, %w0" : "+r" (v));
   return v;
#else
   return (uint16)((v >> 8) | (v << 8));
#endif
}
*-----------------------------------------------------------------------------
*/
-static INLINE uint32
+static inline uint32
Bswap32(uint32 v) // IN
{
#if defined(__GNUC__) && defined(VM_X86_ANY)
   __asm__("bswap %0" : "+r" (v));
   return v;
#else
   return  (v >> 24)
        | ((v >>  8) & 0x0000FF00)
        | ((v <<  8) & 0x00FF0000)
        |  (v << 24);
#endif
}
*-----------------------------------------------------------------------------
*/
-static INLINE uint64
+static inline uint64
Bswap64(uint64 v) // IN
{
#if defined _MSC_VER
   return _byteswap_uint64(v);
#else
   return ((uint64)Bswap32((uint32)v) << 32) | Bswap32((uint32)(v >> 32));
#endif
}
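/*
 * Usage sketch (editor's illustration): byte-order reversal at each width.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
BswapExample(void)
{
   assert(Bswap16(0x1234) == 0x3412);
   assert(Bswap32(0x12345678U) == 0x78563412U);
   assert(Bswap64(CONST64U(0x0102030405060708)) ==
          CONST64U(0x0807060504030201));
}
#endif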
* on non-P4 IA32 systems, the encoding is interpreted as a REPZ-NOP.
* Use volatile to avoid NOP removal.
*/
-static INLINE void
+static inline void
PAUSE(void)
#if defined(__GNUC__) || defined(VM_ARM_32)
{
#ifdef VM_ARM_ANY
   return;   /* ARM: no x86-style spin-wait hint in this path */
#else
   __asm__ __volatile__("pause");
#endif
}
#endif
*
* volatile because the tsc always changes without the compiler knowing it.
*/
-static INLINE uint64
+static inline uint64
RDTSC(void)
#ifdef __GNUC__
{
   uint64 tscLow, tscHigh;

   /* EDX:EAX receive the high and low halves of the TSC. */
   __asm__ __volatile__("rdtsc" : "=a" (tscLow), "=d" (tscHigh));
   return tscHigh << 32 | tscLow;
}
#endif
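/*
 * Usage sketch (editor's illustration): a bounded spin-wait combining
 * PAUSE and RDTSC. 'flag' and 'tscDeadline' are hypothetical names.
 */
#if 0 /* illustration only */
static void
SpinUntil(volatile int *flag, uint64 tscDeadline)
{
   while (*flag == 0 && RDTSC() < tscDeadline) {
      PAUSE();   /* ease pipeline and power pressure while polling */
   }
}
#endif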
*-----------------------------------------------------------------------------
*/
-static INLINE void
+static inline void
SetBit32(uint32 *var, unsigned index)
{
   *var |= 1U << index;   /* unsigned shift: well-defined for index 31 */
}
-static INLINE void
+static inline void
ClearBit32(uint32 *var, unsigned index)
{
   *var &= ~(1U << index);
}
-static INLINE void
+static inline void
ToggleBit32(uint32 *var, unsigned index)
{
   *var ^= 1U << index;
}
-static INLINE void
+static inline void
SetBit64(uint64 *var, unsigned index)
{
*var |= CONST64U(1) << index;
}
-static INLINE void
+static inline void
ClearBit64(uint64 *var, unsigned index)
{
*var &= ~(CONST64U(1) << index);
}
-static INLINE void
+static inline void
ToggleBit64(uint64 *var, unsigned index)
{
*var ^= (CONST64U(1) << index);
}
-static INLINE Bool
+static inline Bool
TestBit32(const uint32 *var, unsigned index)
{
   return (*var & (1U << index)) != 0;
}
-static INLINE Bool
+static inline Bool
TestBit64(const uint64 *var, unsigned index)
{
   return (*var & (CONST64U(1) << index)) != 0;
}
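/*
 * Usage sketch (editor's illustration): single-word bit manipulation.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
BitOpsExample(void)
{
   uint32 w = 0;
   uint64 q = 0;

   SetBit32(&w, 5);
   assert(TestBit32(&w, 5));
   ToggleBit32(&w, 5);
   assert(!TestBit32(&w, 5));
   SetBit64(&q, 63);
   ClearBit64(&q, 63);
   assert(!TestBit64(&q, 63));
}
#endif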
#define GCC_ASM_BT_EPILOG "\n\tsetc\t%0" : "=qQm"
#endif
-static INLINE Bool
+static inline Bool
SetBitVector(void *var, int32 index)
{
   Bool bit;

#if defined(__GNUC__) && defined(VM_X86_ANY)
   __asm__("bts %2, %1\n\tsetc %0"
           : "=qQm" (bit), "+m" (*(uint32 *)var)
           : "rI" (index) : "memory", "cc");
#else
   bit = _bittestandset((long *)var, index) != 0;   /* MSVC */
#endif
   return bit;
}
-static INLINE Bool
+static inline Bool
ClearBitVector(void *var, int32 index)
{
   Bool bit;

#if defined(__GNUC__) && defined(VM_X86_ANY)
   __asm__("btr %2, %1\n\tsetc %0"
           : "=qQm" (bit), "+m" (*(uint32 *)var)
           : "rI" (index) : "memory", "cc");
#else
   bit = _bittestandreset((long *)var, index) != 0;   /* MSVC */
#endif
   return bit;
}
-static INLINE Bool
+static inline Bool
ComplementBitVector(void *var, int32 index)
{
   Bool bit;

#if defined(__GNUC__) && defined(VM_X86_ANY)
   __asm__("btc %2, %1\n\tsetc %0"
           : "=qQm" (bit), "+m" (*(uint32 *)var)
           : "rI" (index) : "memory", "cc");
#else
   bit = _bittestandcomplement((long *)var, index) != 0;   /* MSVC */
#endif
   return bit;
}
-static INLINE Bool
+static inline Bool
TestBitVector(const void *var, int32 index)
{
#if defined(__GNUC__) && defined(VM_X86_ANY)
   Bool bit;

   __asm__("bt %2, %1\n\tsetc %0"
           : "=qQm" (bit)
           : "m" (*(const uint32 *)var), "rI" (index) : "cc");
   return bit;
#else
   return _bittest((const long *)var, index) != 0;   /* MSVC */
#endif
}
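/*
 * Usage sketch (editor's illustration): the *BitVector forms address bits
 * across an arbitrarily long array and return the bit's previous value.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
BitVectorExample(void)
{
   uint32 vec[4] = { 0 };            /* 128 bits */

   assert(!SetBitVector(vec, 100));  /* bit 100 was clear */
   assert(TestBitVector(vec, 100));
   assert(ClearBitVector(vec, 100)); /* bit 100 was set */
   assert(!TestBitVector(vec, 100));
}
#endif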
*-----------------------------------------------------------------------------
*/
-static INLINE uint64
+static inline uint64
RoundDownPow2_64(uint64 value)
{
   if ((value & (value - 1)) == 0) {
      /*
       * Already zero or a power of two.
       */
      return value;
   }
   return CONST64U(1) << mssb64_0(value);
}
-static INLINE uint32
+static inline uint32
RoundDownPow2_32(uint32 value)
{
if ((value & (value - 1)) == 0) {
/*
- * Already a power of two.
+ * Already zero or a power of two.
*/
return value;
   }
   return 1U << mssb32_0(value);
}
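/*
 * Usage sketch (editor's illustration): rounding down to a power of two.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
RoundDownExample(void)
{
   assert(RoundDownPow2_32(100) == 64);
   assert(RoundDownPow2_32(64) == 64);   /* already a power of two */
   assert(RoundDownPow2_32(0) == 0);     /* zero passes through */
   assert(RoundDownPow2_64((CONST64U(1) << 40) + 1) == CONST64U(1) << 40);
}
#endif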
*-----------------------------------------------------------------------------
*/
-static INLINE uint64
+static inline uint64
RoundUpPow2C64(uint64 value)
{
   if (value <= 1 || value > (CONST64U(1) << 63)) {
      return value;   /* out of range: return the input unchanged */
   } else {
      return CONST64U(2) << mssb64_0(value - 1);
   }
}
#if defined(__GNUC__) && defined(VM_X86_64)
-static INLINE uint64
+static inline uint64
RoundUpPow2Asm64(uint64 value)
{
   uint64 out = 2;

   __asm__("lea -1(%[in]), %%rcx;"   // rcx = value - 1
           "bsr %%rcx, %%rcx;"       // rcx = mssb(value - 1); ZF set if value == 1
           "shl %%cl, %[out];"       // out = 2 << mssb(value - 1)
           "cmovz %[in], %[out]"     // value == 1: out = value
           : [out] "+r" (out)
           : [in] "r" (value)
           : "%rcx", "cc");
   return out;
}
#endif
-static INLINE uint64
+static inline uint64
RoundUpPow2_64(uint64 value)
{
#if defined(__GNUC__) && defined(VM_X86_64)
   if (__builtin_constant_p(value)) {
      return RoundUpPow2C64(value);   /* lets GCC fold constants */
   }
   return RoundUpPow2Asm64(value);
#else
   return RoundUpPow2C64(value);
#endif
}
-static INLINE uint32
+static inline uint32
RoundUpPow2C32(uint32 value)
{
   if (value <= 1 || value > (1U << 31)) {
      return value;   /* out of range: return the input unchanged */
   } else {
      return 2U << mssb32_0(value - 1);
   }
}
#ifdef __GNUC__
-static INLINE uint32
+static inline uint32
RoundUpPow2Asm32(uint32 value)
{
#ifdef VM_ARM_32
   uint32 out = 1;

   __asm__("sub %[in], %[in], #1;"           // in = value - 1
           "clz %[in], %[in];"               // in = 31 - mssb(value - 1)
           "mov %[out], %[out], ror %[in]"   // out = 1 ror in == 2 << mssb(value - 1)
           : [in] "+r" (value), [out] "+r" (out) : : "cc");
   return out;
#else
   return RoundUpPow2C32(value);
#endif
}
#endif // __GNUC__
-static INLINE uint32
+static inline uint32
RoundUpPow2_32(uint32 value)
{
#ifdef __GNUC__
   if (__builtin_constant_p(value)) {
      return RoundUpPow2C32(value);   /* lets GCC fold constants */
   }
   return RoundUpPow2Asm32(value);
#else
   return RoundUpPow2C32(value);
#endif
}
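/*
 * Usage sketch (editor's illustration): rounding up to a power of two.
 * 0, 1, and values above 2^31 (2^63 for the 64-bit form) come back unchanged.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
RoundUpExample(void)
{
   assert(RoundUpPow2_32(100) == 128);
   assert(RoundUpPow2_32(128) == 128);   /* already a power of two */
   assert(RoundUpPow2_64((CONST64U(1) << 40) + 1) == CONST64U(1) << 41);
}
#endif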
*-----------------------------------------------------------------------------
*/
-static INLINE unsigned
+static inline unsigned
PopCount32(uint32 value)
{
#if defined(__GNUC__) && defined(__POPCNT__)
   return (unsigned)__builtin_popcount(value);
#else
   /* Classic SWAR reduction. */
   value = value - ((value >> 1) & 0x55555555);
   value = (value & 0x33333333) + ((value >> 2) & 0x33333333);
   value = (value + (value >> 4)) & 0x0F0F0F0F;
   return (value * 0x01010101) >> 24;
#endif
}
*-----------------------------------------------------------------------------
*/
-static INLINE unsigned
+static inline unsigned
PopCount64(uint64 value)
{
#if defined(__GNUC__) && defined(__POPCNT__)
   return (unsigned)__builtin_popcountll(value);
#else
   return PopCount32((uint32)value) + PopCount32((uint32)(value >> 32));
#endif
}
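/*
 * Usage sketch (editor's illustration): population count.
 */
#if 0 /* illustration only */
#include <assert.h>

static void
PopCountExample(void)
{
   assert(PopCount32(0) == 0);
   assert(PopCount32(0xF0F0F0F0U) == 16);
   assert(PopCount64(~CONST64U(0)) == 64);
}
#endif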
#ifdef __GNUC__
-static INLINE void
+static inline void
INTR_RW_BARRIER_RW(void)
{
   __asm__ __volatile__("" ::: "memory");
}
#elif defined _MSC_VER
-static INLINE void
+static inline void
INTR_R_BARRIER_R(void)
{
_ReadBarrier();
}
-static INLINE void
+static inline void
INTR_W_BARRIER_W(void)
{
_WriteBarrier();
}
-static INLINE void
+static inline void
INTR_RW_BARRIER_RW(void)
{
   _ReadWriteBarrier();
}
#endif
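/*
 * Usage sketch (editor's illustration): a compiler barrier ordering a
 * payload store before its ready flag, for data shared with an interrupt
 * handler on the same CPU. 'payload' and 'dataReady' are hypothetical.
 */
#if 0 /* illustration only */
static int payload;
static volatile int dataReady;

static void
Publish(int value)
{
   payload = value;
   INTR_W_BARRIER_W();   /* compiler may not sink the store below the flag */
   dataReady = 1;
}
#endif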