/* SPDX-License-Identifier: LGPL-2.1-or-later */
#pragma once

#if !SD_BOOT
#  include <assert.h>
#endif

#include <limits.h>
#include <stdalign.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define _align_(x) __attribute__((__aligned__(x)))
#define _alignas_(x) __attribute__((__aligned__(alignof(x))))
#define _alignptr_ __attribute__((__aligned__(sizeof(void *))))
#define _cleanup_(x) __attribute__((__cleanup__(x)))
#define _const_ __attribute__((__const__))
#define _deprecated_ __attribute__((__deprecated__))
#define _destructor_ __attribute__((__destructor__))
#define _hidden_ __attribute__((__visibility__("hidden")))
#define _likely_(x) (__builtin_expect(!!(x), 1))
#define _malloc_ __attribute__((__malloc__))
#define _noinline_ __attribute__((noinline))
#define _noreturn_ _Noreturn
#define _packed_ __attribute__((__packed__))
#define _printf_(a, b) __attribute__((__format__(printf, a, b)))
#define _public_ __attribute__((__visibility__("default")))
#define _pure_ __attribute__((__pure__))
#define _retain_ __attribute__((__retain__))
#define _returns_nonnull_ __attribute__((__returns_nonnull__))
#define _section_(x) __attribute__((__section__(x)))
#define _sentinel_ __attribute__((__sentinel__))
#define _unlikely_(x) (__builtin_expect(!!(x), 0))
#define _unused_ __attribute__((__unused__))
#define _used_ __attribute__((__used__))
#define _warn_unused_result_ __attribute__((__warn_unused_result__))
#define _weak_ __attribute__((__weak__))
#define _weakref_(x) __attribute__((__weakref__(#x)))

#ifdef __clang__
#  define _alloc_(...)
#else
#  define _alloc_(...) __attribute__((__alloc_size__(__VA_ARGS__)))
#endif

#if __GNUC__ >= 7 || (defined(__clang__) && __clang_major__ >= 10)
#  define _fallthrough_ __attribute__((__fallthrough__))
#else
#  define _fallthrough_
#endif

#define XSTRINGIFY(x) #x
#define STRINGIFY(x) XSTRINGIFY(x)

#ifndef __COVERITY__
#  define VOID_0 ((void)0)
#else
#  define VOID_0 ((void*)0)
#endif

#define ELEMENTSOF(x) \
        (__builtin_choose_expr( \
                !__builtin_types_compatible_p(typeof(x), typeof(&*(x))), \
                sizeof(x)/sizeof((x)[0]), \
                VOID_0))

#define XCONCATENATE(x, y) x ## y
#define CONCATENATE(x, y) XCONCATENATE(x, y)

#if SD_BOOT
        _noreturn_ void efi_assert(const char *expr, const char *file, unsigned line, const char *function);

        #ifdef NDEBUG
                #define assert(expr)
                #define assert_not_reached() __builtin_unreachable()
        #else
                #define assert(expr) ({ _likely_(expr) ? VOID_0 : efi_assert(#expr, __FILE__, __LINE__, __func__); })
                #define assert_not_reached() efi_assert("Code should not be reached", __FILE__, __LINE__, __func__)
        #endif
        #define static_assert _Static_assert
        #define assert_se(expr) ({ _likely_(expr) ? VOID_0 : efi_assert(#expr, __FILE__, __LINE__, __func__); })
#endif

/* This passes the argument through after (if asserts are enabled) checking that it is not null. */
#define ASSERT_PTR(expr) _ASSERT_PTR(expr, UNIQ_T(_expr_, UNIQ), assert)
#define ASSERT_SE_PTR(expr) _ASSERT_PTR(expr, UNIQ_T(_expr_, UNIQ), assert_se)
#define _ASSERT_PTR(expr, var, check) \
        ({ \
                typeof(expr) var = (expr); \
                check(var); \
                var; \
        })

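/* Illustrative usage sketch (not part of the upstream header): ASSERT_PTR() validates a pointer
 * argument while assigning it to a local variable in one step. The function and field names below
 * are hypothetical.
 *
 *         int object_get_value(Object *o) {
 *                 Object *obj = ASSERT_PTR(o);   // aborts (when asserts are enabled) if o is NULL
 *                 return obj->value;
 *         }
 */
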
#define ASSERT_NONNEG(expr) \
        ({ \
                typeof(expr) _expr_ = (expr), _zero = 0; \
                assert(_expr_ >= _zero); \
                _expr_; \
        })

#define ASSERT_SE_NONNEG(expr) \
        ({ \
                typeof(expr) _expr_ = (expr), _zero = 0; \
                assert_se(_expr_ >= _zero); \
                _expr_; \
        })

#define assert_cc(expr) static_assert(expr, #expr)

#define UNIQ_T(x, uniq) CONCATENATE(__unique_prefix_, CONCATENATE(x, uniq))
#define UNIQ __COUNTER__

/* Note that this works differently from pthread_once(): this macro does
 * not synchronize code execution, i.e. code that is run conditionalized
 * on this macro will run concurrently to all other code conditionalized
 * the same way, there's no ordering or completion enforced. */
#define ONCE __ONCE(UNIQ_T(_once_, UNIQ))
#define __ONCE(o) \
        ({ \
                static bool (o) = false; \
                __atomic_exchange_n(&(o), true, __ATOMIC_SEQ_CST); \
        })

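/* Illustrative usage sketch (not part of the upstream header): ONCE evaluates to the previous
 * value of a per-call-site flag, i.e. to false the first time a call site is reached and to true
 * on every later pass, since __atomic_exchange_n() returns the old value. A hypothetical way to
 * run setup code only on the first pass:
 *
 *         if (!ONCE)
 *                 do_expensive_setup();   // hypothetical helper, runs at most once per call site
 */
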
#undef MAX
#define MAX(a, b) __MAX(UNIQ, (a), UNIQ, (b))
#define __MAX(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) > UNIQ_T(B, bq) ? UNIQ_T(A, aq) : UNIQ_T(B, bq); \
        })

#define IS_UNSIGNED_INTEGER_TYPE(type) \
        (__builtin_types_compatible_p(typeof(type), unsigned char) || \
         __builtin_types_compatible_p(typeof(type), unsigned short) || \
         __builtin_types_compatible_p(typeof(type), unsigned) || \
         __builtin_types_compatible_p(typeof(type), unsigned long) || \
         __builtin_types_compatible_p(typeof(type), unsigned long long))

#define IS_SIGNED_INTEGER_TYPE(type) \
        (__builtin_types_compatible_p(typeof(type), signed char) || \
         __builtin_types_compatible_p(typeof(type), signed short) || \
         __builtin_types_compatible_p(typeof(type), signed) || \
         __builtin_types_compatible_p(typeof(type), signed long) || \
         __builtin_types_compatible_p(typeof(type), signed long long))

/* Evaluates to (void) if _A or _B are not constant or of different types (being integers of different sizes
 * is also OK as long as the signedness matches) */
#define CONST_MAX(_A, _B) \
        (__builtin_choose_expr( \
                __builtin_constant_p(_A) && \
                __builtin_constant_p(_B) && \
                (__builtin_types_compatible_p(typeof(_A), typeof(_B)) || \
                 (IS_UNSIGNED_INTEGER_TYPE(_A) && IS_UNSIGNED_INTEGER_TYPE(_B)) || \
                 (IS_SIGNED_INTEGER_TYPE(_A) && IS_SIGNED_INTEGER_TYPE(_B))), \
                ((_A) > (_B)) ? (_A) : (_B), \
                VOID_0))

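/* Illustrative usage sketch (not part of the upstream header): because CONST_MAX() stays an
 * integer constant expression (and degrades to (void) otherwise), it can size arrays and feed
 * static assertions. Both operands below have type size_t, so the type check passes:
 *
 *         char buf[CONST_MAX(sizeof(uint32_t), sizeof(uint64_t))];
 *         assert_cc(CONST_MAX(sizeof(uint32_t), sizeof(uint64_t)) == 8);
 */
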
/* takes two types and returns the size of the larger one */
#define MAXSIZE(A, B) (sizeof(union _packed_ { typeof(A) a; typeof(B) b; }))

#define MAX3(x, y, z) \
        ({ \
                const typeof(x) _c = MAX(x, y); \
                MAX(_c, z); \
        })

#define MAX4(x, y, z, a) \
        ({ \
                const typeof(x) _d = MAX3(x, y, z); \
                MAX(_d, a); \
        })

#undef MIN
#define MIN(a, b) __MIN(UNIQ, (a), UNIQ, (b))
#define __MIN(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) < UNIQ_T(B, bq) ? UNIQ_T(A, aq) : UNIQ_T(B, bq); \
        })

/* evaluates to (void) if _A or _B are not constant or of different types */
#define CONST_MIN(_A, _B) \
        (__builtin_choose_expr( \
                __builtin_constant_p(_A) && \
                __builtin_constant_p(_B) && \
                __builtin_types_compatible_p(typeof(_A), typeof(_B)), \
                ((_A) < (_B)) ? (_A) : (_B), \
                VOID_0))

#define MIN3(x, y, z) \
        ({ \
                const typeof(x) _c = MIN(x, y); \
                MIN(_c, z); \
        })

/* Returns true if the passed integer is a positive power of two */
#define CONST_ISPOWEROF2(x) \
        ((x) > 0 && ((x) & ((x) - 1)) == 0)

#define ISPOWEROF2(x) \
        __builtin_choose_expr( \
                __builtin_constant_p(x), \
                CONST_ISPOWEROF2(x), \
                ({ \
                        const typeof(x) _x = (x); \
                        CONST_ISPOWEROF2(_x); \
                }))

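/* Worked example (not part of the upstream header): x & (x - 1) clears the lowest set bit, so the
 * result is zero exactly when at most one bit is set; together with x > 0 this accepts precisely
 * the positive powers of two:
 *
 *         assert_cc(CONST_ISPOWEROF2(4096));
 *         assert_cc(!CONST_ISPOWEROF2(0));
 *         assert_cc(!CONST_ISPOWEROF2(12));    // 12 = 0b1100, 12 & 11 = 0b1000 != 0
 */
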
#define LESS_BY(a, b) __LESS_BY(UNIQ, (a), UNIQ, (b))
#define __LESS_BY(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) > UNIQ_T(B, bq) ? UNIQ_T(A, aq) - UNIQ_T(B, bq) : 0; \
        })

#define CMP(a, b) __CMP(UNIQ, (a), UNIQ, (b))
#define __CMP(aq, a, bq, b) \
        ({ \
                const typeof(a) UNIQ_T(A, aq) = (a); \
                const typeof(b) UNIQ_T(B, bq) = (b); \
                UNIQ_T(A, aq) < UNIQ_T(B, bq) ? -1 : \
                UNIQ_T(A, aq) > UNIQ_T(B, bq) ? 1 : 0; \
        })

#undef CLAMP
#define CLAMP(x, low, high) __CLAMP(UNIQ, (x), UNIQ, (low), UNIQ, (high))
#define __CLAMP(xq, x, lowq, low, highq, high) \
        ({ \
                const typeof(x) UNIQ_T(X, xq) = (x); \
                const typeof(low) UNIQ_T(LOW, lowq) = (low); \
                const typeof(high) UNIQ_T(HIGH, highq) = (high); \
                UNIQ_T(X, xq) > UNIQ_T(HIGH, highq) ? \
                        UNIQ_T(HIGH, highq) : \
                        UNIQ_T(X, xq) < UNIQ_T(LOW, lowq) ? \
                                UNIQ_T(LOW, lowq) : \
                                UNIQ_T(X, xq); \
        })

/* [(x + y - 1) / y] suffers from an integer overflow, even though the
 * computation should be possible in the given type. Therefore, we use
 * [x / y + !!(x % y)]. Note that on "Real CPUs" a division returns both the
 * quotient and the remainder, so both should be equally fast. */
#define DIV_ROUND_UP(x, y) __DIV_ROUND_UP(UNIQ, (x), UNIQ, (y))
#define __DIV_ROUND_UP(xq, x, yq, y) \
        ({ \
                const typeof(x) UNIQ_T(X, xq) = (x); \
                const typeof(y) UNIQ_T(Y, yq) = (y); \
                (UNIQ_T(X, xq) / UNIQ_T(Y, yq) + !!(UNIQ_T(X, xq) % UNIQ_T(Y, yq))); \
        })

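/* Worked example (not part of the upstream header): for x = UINT32_MAX and y = 8 in 32-bit
 * arithmetic, the naive (x + y - 1) / y overflows, while x / y + !!(x % y) stays in range:
 * UINT32_MAX / 8 == 536870911 with remainder 7, hence DIV_ROUND_UP(UINT32_MAX, 8) == 536870912. */
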
/* Rounds up x to the next multiple of y. Resolves to typeof(x) -1 in case of overflow */
#define __ROUND_UP(q, x, y) \
        ({ \
                const typeof(y) UNIQ_T(A, q) = (y); \
                const typeof(x) UNIQ_T(B, q) = DIV_ROUND_UP((x), UNIQ_T(A, q)); \
                typeof(x) UNIQ_T(C, q); \
                __builtin_mul_overflow(UNIQ_T(B, q), UNIQ_T(A, q), &UNIQ_T(C, q)) ? (typeof(x)) -1 : UNIQ_T(C, q); \
        })
#define ROUND_UP(x, y) __ROUND_UP(UNIQ, (x), (y))

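/* Illustrative usage sketch (not part of the upstream header):
 *
 *         ROUND_UP(5U, 4U) == 8U and ROUND_UP(8U, 4U) == 8U;
 *         ROUND_UP(UINT32_MAX, 16U) == UINT32_MAX, i.e. (typeof(x)) -1, signalling overflow.
 */
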
#define CASE_F_1(X) case X:
#define CASE_F_2(X, ...) case X: CASE_F_1( __VA_ARGS__)
#define CASE_F_3(X, ...) case X: CASE_F_2( __VA_ARGS__)
#define CASE_F_4(X, ...) case X: CASE_F_3( __VA_ARGS__)
#define CASE_F_5(X, ...) case X: CASE_F_4( __VA_ARGS__)
#define CASE_F_6(X, ...) case X: CASE_F_5( __VA_ARGS__)
#define CASE_F_7(X, ...) case X: CASE_F_6( __VA_ARGS__)
#define CASE_F_8(X, ...) case X: CASE_F_7( __VA_ARGS__)
#define CASE_F_9(X, ...) case X: CASE_F_8( __VA_ARGS__)
#define CASE_F_10(X, ...) case X: CASE_F_9( __VA_ARGS__)
#define CASE_F_11(X, ...) case X: CASE_F_10( __VA_ARGS__)
#define CASE_F_12(X, ...) case X: CASE_F_11( __VA_ARGS__)
#define CASE_F_13(X, ...) case X: CASE_F_12( __VA_ARGS__)
#define CASE_F_14(X, ...) case X: CASE_F_13( __VA_ARGS__)
#define CASE_F_15(X, ...) case X: CASE_F_14( __VA_ARGS__)
#define CASE_F_16(X, ...) case X: CASE_F_15( __VA_ARGS__)
#define CASE_F_17(X, ...) case X: CASE_F_16( __VA_ARGS__)
#define CASE_F_18(X, ...) case X: CASE_F_17( __VA_ARGS__)
#define CASE_F_19(X, ...) case X: CASE_F_18( __VA_ARGS__)
#define CASE_F_20(X, ...) case X: CASE_F_19( __VA_ARGS__)

#define GET_CASE_F(_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14,_15,_16,_17,_18,_19,_20,NAME,...) NAME
#define FOR_EACH_MAKE_CASE(...) \
        GET_CASE_F(__VA_ARGS__,CASE_F_20,CASE_F_19,CASE_F_18,CASE_F_17,CASE_F_16,CASE_F_15,CASE_F_14,CASE_F_13,CASE_F_12,CASE_F_11, \
                   CASE_F_10,CASE_F_9,CASE_F_8,CASE_F_7,CASE_F_6,CASE_F_5,CASE_F_4,CASE_F_3,CASE_F_2,CASE_F_1) \
                (__VA_ARGS__)

#define IN_SET(x, first, ...) \
        ({ \
                bool _found = false; \
                /* If the build breaks in the line below, you need to extend the case macros. We use typeof(+x) \
                 * here to widen the type of x if it is a bit-field as this would otherwise be illegal. */ \
                static const typeof(+x) __assert_in_set[] _unused_ = { first, __VA_ARGS__ }; \
                assert_cc(ELEMENTSOF(__assert_in_set) <= 20); \
                switch (x) { \
                FOR_EACH_MAKE_CASE(first, __VA_ARGS__) \
                        _found = true; \
                        break; \
                default: \
                        break; \
                } \
                _found; \
        })

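/* Illustrative usage sketch (not part of the upstream header): IN_SET() expands to a switch
 * statement, so every listed value must be a compile-time constant, and at most 20 values are
 * supported. A hypothetical whitespace check:
 *
 *         if (IN_SET(c, ' ', '\t', '\n', '\r'))
 *                 n_whitespace++;
 */
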
/* Takes inspiration from Rust's Option::take() method: reads and returns a pointer, but at the same time
 * resets it to NULL. See: https://doc.rust-lang.org/std/option/enum.Option.html#method.take */
#define TAKE_GENERIC(var, type, nullvalue) \
        ({ \
                type *_pvar_ = &(var); \
                type _var_ = *_pvar_; \
                type _nullvalue_ = nullvalue; \
                *_pvar_ = _nullvalue_; \
                _var_; \
        })
#define TAKE_PTR_TYPE(ptr, type) TAKE_GENERIC(ptr, type, NULL)
#define TAKE_PTR(ptr) TAKE_PTR_TYPE(ptr, typeof(ptr))
#define TAKE_STRUCT_TYPE(s, type) TAKE_GENERIC(s, type, {})
#define TAKE_STRUCT(s) TAKE_STRUCT_TYPE(s, typeof(s))

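/* Illustrative usage sketch (not part of the upstream header): TAKE_PTR() is typically combined
 * with _cleanup_ variables to transfer ownership on the success path, so the cleanup handler sees
 * NULL and frees nothing. freep() below stands for a cleanup helper defined elsewhere in the tree.
 *
 *         char *dup_or_null(const char *s) {
 *                 _cleanup_(freep) char *p = strdup(s);
 *                 if (!p)
 *                         return NULL;
 *                 return TAKE_PTR(p);   // p is reset to NULL, the cleanup becomes a no-op
 *         }
 */
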
/*
 * STRLEN - return the length of a string literal, minus the trailing NUL byte.
 * Contrary to strlen(), this is a constant expression.
 * @x: a string literal.
 */
#define STRLEN(x) (sizeof(""x"") - sizeof(typeof(x[0])))

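/* Illustrative usage sketch (not part of the upstream header): being a constant expression,
 * STRLEN() can size arrays and appear in static assertions, which strlen() cannot:
 *
 *         char buf[STRLEN("PREFIX=") + 37];
 *         assert_cc(STRLEN("foo") == 3);
 */
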
#define mfree(memory) \
        ({ \
                free(memory); \
                (typeof(memory)) NULL; \
        })

static inline size_t ALIGN_TO(size_t l, size_t ali) {
        assert(ISPOWEROF2(ali));

        if (l > SIZE_MAX - (ali - 1))
                return SIZE_MAX; /* indicate overflow */

        return ((l + ali - 1) & ~(ali - 1));
}

#define ALIGN2(l) ALIGN_TO(l, 2)
#define ALIGN4(l) ALIGN_TO(l, 4)
#define ALIGN8(l) ALIGN_TO(l, 8)
#define ALIGN2_PTR(p) ((void*) ALIGN2((uintptr_t) p))
#define ALIGN4_PTR(p) ((void*) ALIGN4((uintptr_t) p))
#define ALIGN8_PTR(p) ((void*) ALIGN8((uintptr_t) p))
#define ALIGN(l) ALIGN_TO(l, sizeof(void*))
#define ALIGN_PTR(p) ((void*) ALIGN((uintptr_t) (p)))

/* Checks if the specified pointer is aligned as appropriate for the specific type */
#define IS_ALIGNED16(p) (((uintptr_t) p) % alignof(uint16_t) == 0)
#define IS_ALIGNED32(p) (((uintptr_t) p) % alignof(uint32_t) == 0)
#define IS_ALIGNED64(p) (((uintptr_t) p) % alignof(uint64_t) == 0)

/* Same as ALIGN_TO but callable in constant contexts. */
#define CONST_ALIGN_TO(l, ali) \
        __builtin_choose_expr( \
                __builtin_constant_p(l) && \
                __builtin_constant_p(ali) && \
                CONST_ISPOWEROF2(ali) && \
                (l <= SIZE_MAX - (ali - 1)), /* overflow? */ \
                ((l) + (ali) - 1) & ~((ali) - 1), \
                VOID_0)

/* Similar to ((t *) (void *) (p)) to cast a pointer. The macro asserts that the pointer has a suitable
 * alignment for type "t". This exists for places where otherwise "-Wcast-align=strict" would issue a
 * warning or if you want to assert that the cast gives a pointer of suitable alignment. */
#define CAST_ALIGN_PTR(t, p) \
        ({ \
                const void *_p = (p); \
                assert(((uintptr_t) _p) % alignof(t) == 0); \
                (t *) _p; \
        })

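/* Illustrative usage sketch (not part of the upstream header): the variables are hypothetical; the
 * point is that the cast carries a runtime alignment check instead of silencing the warning blindly:
 *
 *         const uint8_t *p = get_buffer();                        // hypothetical, 4-byte aligned
 *         const uint32_t *v = CAST_ALIGN_PTR(const uint32_t, p);  // asserts the alignment holds
 */
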
#define UPDATE_FLAG(orig, flag, b) \
        ((b) ? ((orig) | (flag)) : ((orig) & ~(flag)))
#define SET_FLAG(v, flag, b) \
        (v) = UPDATE_FLAG(v, flag, b)
#define FLAGS_SET(v, flags) \
        ((~(v) & (flags)) == 0)

/* Declare a flexible array usable in a union.
 * This is essentially a work-around for a pointless constraint in C99
 * and might go away in some future version of the standard.
 *
 * See https://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=3080ea5553cc909b000d1f1d964a9041962f2c5b
 */
#define DECLARE_FLEX_ARRAY(type, name) \
        struct { \
                dummy_t __empty__ ## name; \
                type name[]; \
        }

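/* Illustrative usage sketch (not part of the upstream header), mirroring the kernel commit linked
 * above: embed flexible array members inside a union. The struct and member names are hypothetical:
 *
 *         struct message {
 *                 size_t n_bytes;
 *                 union {
 *                         DECLARE_FLEX_ARRAY(uint8_t, raw);
 *                         DECLARE_FLEX_ARRAY(uint32_t, words);
 *                 };
 *         };
 */
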
#ifdef SBAT_DISTRO
        #define DECLARE_SBAT(text) \
                static const char sbat[] _used_ _section_(".sbat") = (text)
#else
        #define DECLARE_SBAT(text)
#endif