#endif /* __has_header(<stdbit.h>) */
/*
 * Rotate an 8-bit value left by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 8.  The operand is evaluated exactly
 * once, and the result is cast back to uint8_t: integer promotion
 * widens the shifts to 'int', so without the cast the bits shifted out
 * of the top of the byte would remain set in the result (e.g.
 * 0x80 rotated left by 1 would yield 0x101 instead of 0x01).
 */
#define ISC_ROTATE_LEFT8(x, n)                                            \
	({                                                                \
		STATIC_ASSERT(n > 0 && n < 8,                             \
			      "rotation must be a constant between 0 and 8"); \
		STATIC_ASSERT(                                            \
			__builtin_types_compatible_p(typeof(x), uint8_t), \
			"rotated value must be uint8_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */   \
		(uint8_t)(_rot_x << (n) | _rot_x >> (8 - (n)));           \
	})
+
/*
 * Rotate a 16-bit value left by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 16.  The operand is evaluated exactly
 * once, and the result is cast back to uint16_t because integer
 * promotion to 'int' would otherwise leave the bits shifted past bit 15
 * set in the result.
 */
#define ISC_ROTATE_LEFT16(x, n)                                            \
	({                                                                 \
		STATIC_ASSERT(n > 0 && n < 16,                             \
			      "rotation must be a constant between 0 and 16"); \
		STATIC_ASSERT(                                             \
			__builtin_types_compatible_p(typeof(x), uint16_t), \
			"rotated value must be uint16_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */    \
		(uint16_t)(_rot_x << (n) | _rot_x >> (16 - (n)));          \
	})
+
/*
 * Rotate a 32-bit value left by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 32.  The operand is evaluated exactly
 * once; the cast keeps the expression's type uint32_t, matching the
 * narrower variants.
 */
#define ISC_ROTATE_LEFT32(x, n)                                            \
	({                                                                 \
		STATIC_ASSERT(n > 0 && n < 32,                             \
			      "rotation must be a constant between 0 and 32"); \
		STATIC_ASSERT(                                             \
			__builtin_types_compatible_p(typeof(x), uint32_t), \
			"rotated value must be uint32_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */    \
		(uint32_t)(_rot_x << (n) | _rot_x >> (32 - (n)));          \
	})
+
/*
 * Rotate a 64-bit value left by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 64.  The operand is evaluated exactly
 * once; the cast keeps the expression's type uint64_t, matching the
 * narrower variants.
 */
#define ISC_ROTATE_LEFT64(x, n)                                            \
	({                                                                 \
		STATIC_ASSERT(n > 0 && n < 64,                             \
			      "rotation must be a constant between 0 and 64"); \
		STATIC_ASSERT(                                             \
			__builtin_types_compatible_p(typeof(x), uint64_t), \
			"rotated value must be uint64_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */    \
		(uint64_t)(_rot_x << (n) | _rot_x >> (64 - (n)));          \
	})
+
/*
 * Rotate an 8-bit value right by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 8.  The operand is evaluated exactly
 * once, and the result is cast back to uint8_t: the '(x) << (8 - n)'
 * half is widened to 'int' by promotion, so without the cast bits
 * above bit 7 would remain set (e.g. 0xFF rotated right by 1 would
 * yield 0x7FFF instead of 0xFF).
 */
#define ISC_ROTATE_RIGHT8(x, n)                                           \
	({                                                                \
		STATIC_ASSERT(n > 0 && n < 8,                             \
			      "rotation must be a constant between 0 and 8"); \
		STATIC_ASSERT(                                            \
			__builtin_types_compatible_p(typeof(x), uint8_t), \
			"rotated value must be uint8_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */   \
		(uint8_t)(_rot_x >> (n) | _rot_x << (8 - (n)));           \
	})
+
/*
 * Rotate a 16-bit value right by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 16.  The operand is evaluated exactly
 * once, and the result is cast back to uint16_t because the
 * '(x) << (16 - n)' half is promoted to 'int' and would otherwise
 * leave bits above bit 15 set in the result.
 */
#define ISC_ROTATE_RIGHT16(x, n)                                           \
	({                                                                 \
		STATIC_ASSERT(n > 0 && n < 16,                             \
			      "rotation must be a constant between 0 and 16"); \
		STATIC_ASSERT(                                             \
			__builtin_types_compatible_p(typeof(x), uint16_t), \
			"rotated value must be uint16_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */    \
		(uint16_t)(_rot_x >> (n) | _rot_x << (16 - (n)));          \
	})
+
/*
 * Rotate a 32-bit value right by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 32.  The operand is evaluated exactly
 * once; the cast keeps the expression's type uint32_t, matching the
 * narrower variants.
 */
#define ISC_ROTATE_RIGHT32(x, n)                                           \
	({                                                                 \
		STATIC_ASSERT(n > 0 && n < 32,                             \
			      "rotation must be a constant between 0 and 32"); \
		STATIC_ASSERT(                                             \
			__builtin_types_compatible_p(typeof(x), uint32_t), \
			"rotated value must be uint32_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */    \
		(uint32_t)(_rot_x >> (n) | _rot_x << (32 - (n)));          \
	})
+
/*
 * Rotate a 64-bit value right by 'n' bits; 'n' must be a compile-time
 * constant strictly between 0 and 64.  The operand is evaluated exactly
 * once; the cast keeps the expression's type uint64_t, matching the
 * narrower variants.
 */
#define ISC_ROTATE_RIGHT64(x, n)                                           \
	({                                                                 \
		STATIC_ASSERT(n > 0 && n < 64,                             \
			      "rotation must be a constant between 0 and 64"); \
		STATIC_ASSERT(                                             \
			__builtin_types_compatible_p(typeof(x), uint64_t), \
			"rotated value must be uint64_t");                 \
		typeof(x) _rot_x = (x); /* evaluate 'x' exactly once */    \
		(uint64_t)(_rot_x >> (n) | _rot_x << (64 - (n)));          \
	})
+
/*
 * size_t-width rotations: dispatch to the 32- or 64-bit variant that
 * matches the platform's size_t, as determined from SIZE_MAX.
 */
#if SIZE_MAX == UINT64_MAX
#define ISC_ROTATE_LEFTSIZE(x, n)  ISC_ROTATE_LEFT64(x, n)
#define ISC_ROTATE_RIGHTSIZE(x, n) ISC_ROTATE_RIGHT64(x, n)
#elif SIZE_MAX == UINT32_MAX
#define ISC_ROTATE_LEFTSIZE(x, n)  ISC_ROTATE_LEFT32(x, n)
#define ISC_ROTATE_RIGHTSIZE(x, n) ISC_ROTATE_RIGHT32(x, n)
#else
#error "size_t must be either 32 or 64-bits"
#endif
-
-static inline uint8_t __attribute__((always_inline))
-isc_rotate_left8(const uint8_t x, uint32_t n) {
- return (x << n) | (x >> (8 - n));
-}
-
-static inline uint16_t __attribute__((always_inline))
-isc_rotate_left16(const uint16_t x, uint32_t n) {
- return (x << n) | (x >> (16 - n));
-}
-
-static inline uint32_t __attribute__((always_inline))
-isc_rotate_left32(const uint32_t x, uint32_t n) {
- return (x << n) | (x >> (32 - n));
-}
-
-static inline uint64_t __attribute__((always_inline))
-isc_rotate_left64(const uint64_t x, uint32_t n) {
- return (x << n) | (x >> (64 - n));
-}
-
-static inline uint8_t __attribute__((always_inline))
-isc_rotate_right8(const uint8_t x, uint32_t n) {
- return (x >> n) | (x << (8 - n));
-}
-
-static inline uint16_t __attribute__((always_inline))
-isc_rotate_right16(const uint16_t x, uint32_t n) {
- return (x >> n) | (x << (16 - n));
-}
-
-static inline uint32_t __attribute__((always_inline))
-isc_rotate_right32(const uint32_t x, uint32_t n) {
- return (x >> n) | (x << (32 - n));
-}
-
-static inline uint64_t __attribute__((always_inline))
-isc_rotate_right64(const uint64_t x, uint32_t n) {
- return (x >> n) | (x << (64 - n));
-}
/*
 * One half round of the 64-bit mixing function: add-rotate-xor over the
 * four 64-bit lanes a..d, with rotation amounts 's' and 't', finishing
 * with a fixed 32-bit rotation of 'a'.  Wrapped in do { } while (0) so
 * the multi-statement macro behaves as a single statement (safe after
 * an unbraced 'if', for example); existing call sites that invoke it
 * with a trailing ';' are unaffected.
 */
#define HALF_ROUND64(a, b, c, d, s, t)           \
	do {                                     \
		a += b;                          \
		c += d;                          \
		b = ISC_ROTATE_LEFT64(b, s) ^ a; \
		d = ISC_ROTATE_LEFT64(d, t) ^ c; \
		a = ISC_ROTATE_LEFT64(a, 32);    \
	} while (0)
#define FULL_ROUND64(v0, v1, v2, v3) \
HALF_ROUND64(v0, v1, v2, v3, 13, 16); \
/*
 * One half round of the 32-bit mixing function: add-rotate-xor over the
 * four 32-bit lanes a..d, with rotation amounts 's' and 't', finishing
 * with a fixed 16-bit rotation of 'a'.  Wrapped in do { } while (0) so
 * the multi-statement macro behaves as a single statement (safe after
 * an unbraced 'if', for example); existing call sites that invoke it
 * with a trailing ';' are unaffected.
 */
#define HALF_ROUND32(a, b, c, d, s, t)           \
	do {                                     \
		a += b;                          \
		c += d;                          \
		b = ISC_ROTATE_LEFT32(b, s) ^ a; \
		d = ISC_ROTATE_LEFT32(d, t) ^ c; \
		a = ISC_ROTATE_LEFT32(a, 16);    \
	} while (0)
#define FULL_ROUND32(v0, v1, v2, v3) \
HALF_ROUND32(v0, v1, v2, v3, 5, 8); \