/*
 * Copyright (C) 2008-2014 Tobias Brunner
 * Copyright (C) 2008 Martin Willi
 *
 * Copyright (C) secunet Security Networks AG
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * for more details.
 */

/**
 * @defgroup atomics_i atomics
 * @{ @ingroup utils_i
 */

#ifndef ATOMICS_H_
#define ATOMICS_H_

/**
 * Special type to count references.
 *
 * Manipulate only via the ref_get()/ref_put()/ref_cur() operations below,
 * which are atomic.
 */
typedef u_int refcount_t;

/* use __atomic* built-ins with clang, if available (note that clang also
 * defines __GNUC__, however only claims to be GCC 4.2) */
#if defined(__clang__)
# if __has_builtin(__atomic_add_fetch)
#  define HAVE_GCC_ATOMIC_OPERATIONS
# endif
/* use __atomic* built-ins with GCC 4.7 and newer */
#elif defined(__GNUC__)
# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 6))
#  define HAVE_GCC_ATOMIC_OPERATIONS
# endif
#endif

#ifdef HAVE_GCC_ATOMIC_OPERATIONS

#define ref_get(ref) __atomic_add_fetch(ref, 1, __ATOMIC_RELAXED)
/* The relaxed memory model works fine for increments as these (usually) don't
 * change the state of refcounted objects. But here we have to ensure that we
 * free the right stuff if ref counted objects are mutable. So we have to sync
 * with other threads that call ref_put(). It would be sufficient to use
 * __ATOMIC_RELEASE here and then call __atomic_thread_fence() with
 * __ATOMIC_ACQUIRE if we reach 0, but since we don't have control over the use
 * of ref_put() we have to make sure. */
#define ref_put(ref) (!__atomic_sub_fetch(ref, 1, __ATOMIC_ACQ_REL))
#define ref_cur(ref) __atomic_load_n(ref, __ATOMIC_RELAXED)

/* Arguments are parenthesized inside the expansion so that compound
 * expressions (e.g. pointer arithmetic passed as ptr) bind correctly:
 * without them, typeof(*ptr) with ptr being "arr + i" would expand to
 * typeof(*arr + i), which is the wrong type. */
#define _cas_impl(ptr, oldval, newval) ({ \
			typeof(*(ptr)) _old = (oldval); \
			__atomic_compare_exchange_n((ptr), &_old, (newval), FALSE, \
										__ATOMIC_SEQ_CST, __ATOMIC_RELAXED); })
#define cas_bool(ptr, oldval, newval) _cas_impl(ptr, oldval, newval)
#define cas_ptr(ptr, oldval, newval) _cas_impl(ptr, oldval, newval)

#elif defined(HAVE_GCC_SYNC_OPERATIONS)

#define ref_get(ref) __sync_add_and_fetch(ref, 1)
#define ref_put(ref) (!__sync_sub_and_fetch(ref, 1))
#define ref_cur(ref) __sync_fetch_and_add(ref, 0)

#define cas_bool(ptr, oldval, newval) \
					(__sync_bool_compare_and_swap(ptr, oldval, newval))
#define cas_ptr(ptr, oldval, newval) \
					(__sync_bool_compare_and_swap(ptr, oldval, newval))

#else /* !HAVE_GCC_ATOMIC_OPERATIONS && !HAVE_GCC_SYNC_OPERATIONS */

/**
 * Get a new reference.
 *
 * Increments the reference counter atomically.
 *
 * @param ref	pointer to ref counter
 * @return		new value of ref
 */
refcount_t ref_get(refcount_t *ref);

/**
 * Put back an unused reference.
 *
 * Decrements the reference counter atomically and
 * says if more references available.
 *
 * @param ref	pointer to ref counter
 * @return		TRUE if no more references counted
 */
bool ref_put(refcount_t *ref);

/**
 * Get the current value of the reference counter.
 *
 * @param ref	pointer to ref counter
 * @return		current value of ref
 */
refcount_t ref_cur(refcount_t *ref);

/**
 * Atomically replace value of ptr with newval if it currently equals oldval.
 *
 * @param ptr		pointer to variable
 * @param oldval	old value of the variable
 * @param newval	new value set if possible
 * @return			TRUE if value equaled oldval and newval was written
 */
bool cas_bool(bool *ptr, bool oldval, bool newval);

/**
 * Atomically replace value of ptr with newval if it currently equals oldval.
 *
 * @param ptr		pointer to variable
 * @param oldval	old value of the variable
 * @param newval	new value set if possible
 * @return			TRUE if value equaled oldval and newval was written
 */
bool cas_ptr(void **ptr, void *oldval, void *newval);

#endif /* HAVE_GCC_ATOMIC_OPERATIONS */

0cd46df3 MW |
127 | /** |
128 | * Get a new reference, but skip zero on overflow. | |
129 | * | |
130 | * If a reference counter is used to allocate unique identifiers, the | |
131 | * refcount value may overflow if it is never decremented. The 0 identifier | |
132 | * may have special semantics, hence returning can be problematic for some | |
133 | * users. | |
134 | * | |
135 | * This call does an additional ref_get() if ref_get() overflows and returns | |
136 | * zero. This ensures that zero is never returned, in the assumption that it | |
137 | * has special meaning. | |
138 | * | |
139 | * @param ref pointer to ref counter | |
140 | * @return new value of ref | |
141 | */ | |
142 | static inline refcount_t ref_get_nonzero(refcount_t *ref) | |
143 | { | |
144 | refcount_t v; | |
145 | ||
146 | v = ref_get(ref); | |
147 | if (v == 0) | |
148 | { | |
149 | v = ref_get(ref); | |
150 | } | |
151 | return v; | |
152 | } | |
153 | ||
717313c5 MW |
154 | /** |
155 | * Initialize atomics utility functions | |
156 | */ | |
157 | void atomics_init(); | |
158 | ||
159 | /** | |
160 | * Clean up atomics utility functions | |
161 | */ | |
162 | void atomics_deinit(); | |
163 | ||
164 | #endif /** ATOMICS_H_ @} */ |