/* casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */
11 (((__u16)(__x) & (__u16)0x00ffU) << 8) | \
12 (((__u16)(__x) & (__u16)0xff00U) >> 8) )); \
15 #define ___swab32(x) \
19 (((__u32)(__x) & (__u32)0x000000ffUL) << 24) | \
20 (((__u32)(__x) & (__u32)0x0000ff00UL) << 8) | \
21 (((__u32)(__x) & (__u32)0x00ff0000UL) >> 8) | \
22 (((__u32)(__x) & (__u32)0xff000000UL) >> 24) )); \
25 #define ___swab64(x) \
29 (__u64)(((__u64)(__x) & (__u64)0x00000000000000ffULL) << 56) | \
30 (__u64)(((__u64)(__x) & (__u64)0x000000000000ff00ULL) << 40) | \
31 (__u64)(((__u64)(__x) & (__u64)0x0000000000ff0000ULL) << 24) | \
32 (__u64)(((__u64)(__x) & (__u64)0x00000000ff000000ULL) << 8) | \
33 (__u64)(((__u64)(__x) & (__u64)0x000000ff00000000ULL) >> 8) | \
34 (__u64)(((__u64)(__x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
35 (__u64)(((__u64)(__x) & (__u64)0x00ff000000000000ULL) >> 40) | \
36 (__u64)(((__u64)(__x) & (__u64)0xff00000000000000ULL) >> 56) )); \
39 #define ___constant_swab16(x) \
41 (((__u16)(x) & (__u16)0x00ffU) << 8) | \
42 (((__u16)(x) & (__u16)0xff00U) >> 8) ))
43 #define ___constant_swab32(x) \
45 (((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
46 (((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
47 (((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
48 (((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
49 #define ___constant_swab64(x) \
51 (__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
52 (__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
53 (__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
54 (__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) << 8) | \
55 (__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >> 8) | \
56 (__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
57 (__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
58 (__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))
/*
 * Provide defaults when no architecture-specific optimization is detected.
 */
63 #ifndef __arch__swab16
64 # define __arch__swab16(x) ({ __u16 __tmp = (x) ; ___swab16(__tmp); })
66 #ifndef __arch__swab32
67 # define __arch__swab32(x) ({ __u32 __tmp = (x) ; ___swab32(__tmp); })
69 #ifndef __arch__swab64
70 # define __arch__swab64(x) ({ __u64 __tmp = (x) ; ___swab64(__tmp); })
73 #ifndef __arch__swab16p
74 # define __arch__swab16p(x) __arch__swab16(*(x))
76 #ifndef __arch__swab32p
77 # define __arch__swab32p(x) __arch__swab32(*(x))
79 #ifndef __arch__swab64p
80 # define __arch__swab64p(x) __arch__swab64(*(x))
83 #ifndef __arch__swab16s
84 # define __arch__swab16s(x) do { *(x) = __arch__swab16p((x)); } while (0)
86 #ifndef __arch__swab32s
87 # define __arch__swab32s(x) do { *(x) = __arch__swab32p((x)); } while (0)
89 #ifndef __arch__swab64s
90 # define __arch__swab64s(x) do { *(x) = __arch__swab64p((x)); } while (0)
/*
 * Allow constant folding
 */
97 # define __swab16(x) \
98 (__builtin_constant_p((__u16)(x)) ? \
101 # define __swab32(x) \
102 (__builtin_constant_p((__u32)(x)) ? \
105 # define __swab64(x) \
106 (__builtin_constant_p((__u64)(x)) ? \
111 static __inline__ __const__ __u16
__fswab16(__u16 x
)
113 return __arch__swab16(x
);
115 static __inline__ __u16
__swab16p(__u16
*x
)
117 return __arch__swab16p(x
);
119 static __inline__
void __swab16s(__u16
*addr
)
121 __arch__swab16s(addr
);
124 static __inline__ __const__ __u32
__fswab32(__u32 x
)
126 return __arch__swab32(x
);
128 static __inline__ __u32
__swab32p(__u32
*x
)
130 return __arch__swab32p(x
);
132 static __inline__
void __swab32s(__u32
*addr
)
134 __arch__swab32s(addr
);
137 static __inline__ __const__ __u64
__fswab64(__u64 x
)
139 # ifdef __SWAB_64_THRU_32__
141 __u32 l
= x
& ((1ULL<<32)-1);
142 return (((__u64
)__swab32(l
)) << 32) | ((__u64
)(__swab32(h
)));
144 return __arch__swab64(x
);
147 static __inline__ __u64
__swab64p(__u64
*x
)
149 return __arch__swab64p(x
);
151 static __inline__
void __swab64s(__u64
*addr
)
153 __arch__swab64s(addr
);