/*
 * Copyright (C) 2008-2014 Tobias Brunner
 * Copyright (C) 2008 Martin Willi
 * HSR Hochschule fuer Technik Rapperswil
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.  See <http://www.fsf.org/copyleft/gpl.txt>.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * for more details.
 */

/**
 * @defgroup atomics_i atomics
 * @{ @ingroup utils_i
 */

#ifndef ATOMICS_H_
#define ATOMICS_H_

/**
 * Special type to count references
 */
typedef u_int refcount_t;

/* use __atomic* built-ins with clang, if available (note that clang also
 * defines __GNUC__, however only claims to be GCC 4.2) */
#if defined(__clang__)
# if __has_builtin(__atomic_add_fetch)
#  define HAVE_GCC_ATOMIC_OPERATIONS
# endif
/* use __atomic* built-ins with GCC 4.7 and newer */
#elif defined(__GNUC__)
# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 6))
#  define HAVE_GCC_ATOMIC_OPERATIONS
# endif
#endif

#ifdef HAVE_GCC_ATOMIC_OPERATIONS

#define ref_get(ref) __atomic_add_fetch(ref, 1, __ATOMIC_RELAXED)
/* The relaxed memory model works fine for increments as these (usually) don't
 * change the state of refcounted objects.  But here we have to ensure that we
 * free the right stuff if ref counted objects are mutable.  So we have to sync
 * with other threads that call ref_put().  It would be sufficient to use
 * __ATOMIC_RELEASE here and then call __atomic_thread_fence() with
 * __ATOMIC_ACQUIRE if we reach 0, but since we don't have control over the use
 * of ref_put() we have to make sure. */
#define ref_put(ref) (!__atomic_sub_fetch(ref, 1, __ATOMIC_ACQ_REL))
#define ref_cur(ref) __atomic_load_n(ref, __ATOMIC_RELAXED)

/* strong CAS (no spurious failures); FALSE selects the non-weak variant.
 * SEQ_CST on success keeps all callers ordered without them having to think
 * about memory models; RELAXED on failure is fine as _old is discarded. */
#define _cas_impl(ptr, oldval, newval) ({ typeof(oldval) _old = oldval; \
			__atomic_compare_exchange_n(ptr, &_old, newval, FALSE, \
										__ATOMIC_SEQ_CST, __ATOMIC_RELAXED); })
#define cas_bool(ptr, oldval, newval) _cas_impl(ptr, oldval, newval)
#define cas_ptr(ptr, oldval, newval) _cas_impl(ptr, oldval, newval)

#elif defined(HAVE_GCC_SYNC_OPERATIONS)

/* legacy __sync* built-ins imply a full barrier, which is stronger than the
 * __atomic variants above but always correct */
#define ref_get(ref) __sync_add_and_fetch(ref, 1)
#define ref_put(ref) (!__sync_sub_and_fetch(ref, 1))
/* atomic read via add-zero; __sync has no plain load primitive */
#define ref_cur(ref) __sync_fetch_and_add(ref, 0)

#define cas_bool(ptr, oldval, newval) \
					(__sync_bool_compare_and_swap(ptr, oldval, newval))
#define cas_ptr(ptr, oldval, newval) \
					(__sync_bool_compare_and_swap(ptr, oldval, newval))

#else /* !HAVE_GCC_ATOMIC_OPERATIONS && !HAVE_GCC_SYNC_OPERATIONS */

/* mutex-backed fallback implementations, defined in the corresponding .c
 * file; initialized via atomics_init() below */

/**
 * Get a new reference.
 *
 * Increments the reference counter atomically.
 *
 * @param ref	pointer to ref counter
 * @return		new value of ref
 */
refcount_t ref_get(refcount_t *ref);

/**
 * Put back a unused reference.
 *
 * Decrements the reference counter atomically and
 * says if more references available.
 *
 * @param ref	pointer to ref counter
 * @return		TRUE if no more references counted
 */
bool ref_put(refcount_t *ref);

/**
 * Get the current value of the reference counter.
 *
 * @param ref	pointer to ref counter
 * @return		current value of ref
 */
refcount_t ref_cur(refcount_t *ref);

/**
 * Atomically replace value of ptr with newval if it currently equals oldval.
 *
 * @param ptr		pointer to variable
 * @param oldval	old value of the variable
 * @param newval	new value set if possible
 * @return			TRUE if value equaled oldval and newval was written
 */
bool cas_bool(bool *ptr, bool oldval, bool newval);

/**
 * Atomically replace value of ptr with newval if it currently equals oldval.
 *
 * @param ptr		pointer to variable
 * @param oldval	old value of the variable
 * @param newval	new value set if possible
 * @return			TRUE if value equaled oldval and newval was written
 */
bool cas_ptr(void **ptr, void *oldval, void *newval);

#endif /* HAVE_GCC_ATOMIC_OPERATIONS */

/**
 * Initialize atomics utility functions
 */
void atomics_init(void);

/**
 * Clean up atomics utility functions
 */
void atomics_deinit(void);

#endif /** ATOMICS_H_ @} */