MSVC will optimize away the condition check.
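
As a minimal standalone sketch (the helper name `highestBitIndex32` is hypothetical, not part of the patch), this is the pattern the hunks below adopt: the assert documents the nonzero precondition, the `if` keeps the compiler's control-flow analysis happy, and `__assume(0)` tells MSVC the `else` branch is unreachable so no runtime check or dummy return value is emitted.

#include <assert.h>
#if defined(_MSC_VER)
#  include <intrin.h>   /* _BitScanReverse, __assume */
#endif

/* Hypothetical helper, for illustration only: returns the index of the
 * highest set bit. The caller guarantees val != 0. */
static unsigned highestBitIndex32(unsigned val)
{
    assert(val != 0);
#if defined(_MSC_VER)
    if (val != 0) {
        unsigned long r;
        _BitScanReverse(&r, val);
        return (unsigned)r;
    } else {
        /* Unreachable: caller guarantees val != 0. __assume(0) lets MSVC
         * drop the branch and silences the missing-return warning. */
        __assume(0);
    }
#elif defined(__GNUC__) && (__GNUC__ >= 3)
    return (unsigned)(__builtin_clz(val) ^ 31);
#else
    {   /* Portable fallback: shift until the top set bit is consumed. */
        unsigned r = 0;
        while (val >>= 1) r++;
        return r;
    }
#endif
}
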
{
# if defined(_MSC_VER) /* Visual */
# if STATIC_BMI2 == 1
- return _lzcnt_u32(val) ^ 31;
+ return _lzcnt_u32(val) ^ 31;
# else
- unsigned long r;
- return _BitScanReverse(&r, val) ? (unsigned)r : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanReverse(&r, val);
+ return (unsigned)r;
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# endif
# elif defined(__GNUC__) && (__GNUC__ >= 3) /* Use GCC Intrinsic */
return __builtin_clz (val) ^ 31;
assert(val != 0);
{
# if defined(_MSC_VER) /* Visual */
- unsigned long r;
- return _BitScanForward(&r, val) ? (unsigned)r : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward(&r, val);
+ return (unsigned)r;
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3) /* GCC Intrinsic */
return __builtin_ctz(val);
# elif defined(__ICCARM__) /* IAR Intrinsic */
# if STATIC_BMI2 == 1
return _lzcnt_u32(val)^31;
# else
- unsigned long r;
- return _BitScanReverse(&r, val) ? (unsigned)r : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanReverse(&r, val);
+ return (unsigned)r;
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# endif
# elif defined(__GNUC__) && (__GNUC__ >= 3) /* GCC Intrinsic */
return __builtin_clz (val) ^ 31;
# if STATIC_BMI2
return _tzcnt_u64(val);
# else
- unsigned long r;
- return _BitScanForward64( &r, (U64)val ) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward64(&r, (U64)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# endif
# elif defined(__GNUC__) && (__GNUC__ >= 4)
return __builtin_ctzll((U64)val);
# endif
} else { /* 32 bits */
# if defined(_MSC_VER)
- unsigned long r;
- return _BitScanForward( &r, (U32)val ) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward(&r, (U32)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (__builtin_ctz((U32)val) >> 3);
# else
# if STATIC_BMI2
return _tzcnt_u64(val) >> 3;
# else
- unsigned long r;
- return _BitScanForward64( &r, (U64)val ) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward64(&r, (U64)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# endif
# elif defined(__GNUC__) && (__GNUC__ >= 4)
return (__builtin_ctzll((U64)val) >> 3);
# endif
} else { /* 32 bits */
# if defined(_MSC_VER)
- unsigned long r;
- return _BitScanForward( &r, (U32)val ) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward(&r, (U32)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (__builtin_ctz((U32)val) >> 3);
# else
# if STATIC_BMI2
return _lzcnt_u64(val) >> 3;
# else
- unsigned long r;
- return _BitScanReverse64(&r, (U64)val) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanReverse64(&r, (U64)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# endif
# elif defined(__GNUC__) && (__GNUC__ >= 4)
return (__builtin_clzll(val) >> 3);
# endif
} else { /* 32 bits */
# if defined(_MSC_VER)
- unsigned long r;
- return _BitScanReverse( &r, (unsigned long)val ) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanReverse(&r, (unsigned long)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (__builtin_clz((U32)val) >> 3);
# else
static U32 ZSTD_VecMask_next(ZSTD_VecMask val) {
assert(val != 0);
# if defined(_MSC_VER) && defined(_WIN64)
- {
- unsigned long r;
- /* _BitScanForward64 is not defined outside of x64 */
- return _BitScanForward64(&r, val) ? (U32)r : 0;
- }
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward64(&r, val);
+ return (U32)(r);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif (defined(__GNUC__) && ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))
if (sizeof(size_t) == 4) {
U32 mostSignificantWord = (U32)(val >> 32);
if (MEM_isLittleEndian()) {
if (MEM_64bits()) {
# if defined(_MSC_VER) && defined(_WIN64)
- unsigned long r;
- return _BitScanForward64(&r, (U64)val) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward64(&r, (U64)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (unsigned)(__builtin_ctzll((U64)val) >> 3);
# else
# endif
} else { /* 32 bits */
# if defined(_MSC_VER)
- unsigned long r;
- return _BitScanForward(&r, (U32)val) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanForward(&r, (U32)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (unsigned)(__builtin_ctz((U32)val) >> 3);
# else
} else { /* Big Endian CPU */
if (MEM_64bits()) {
# if defined(_MSC_VER) && defined(_WIN64)
- unsigned long r;
- return _BitScanReverse64(&r, val) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanReverse64(&r, val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (unsigned)(__builtin_clzll(val) >> 3);
# else
# endif
} else { /* 32 bits */
# if defined(_MSC_VER)
- unsigned long r;
- return _BitScanReverse(&r, (unsigned long)val) ? (unsigned)(r >> 3) : 0;
+ if (val != 0) {
+ unsigned long r;
+ _BitScanReverse(&r, (unsigned long)val);
+ return (unsigned)(r >> 3);
+ } else {
+ /* Should not reach this code path */
+ __assume(0);
+ }
# elif defined(__GNUC__) && (__GNUC__ >= 3)
return (unsigned)(__builtin_clz((U32)val) >> 3);
# else