/* Atomic operations.  PowerPC Common version.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
/*
 * Never include sysdeps/powerpc/atomic-machine.h directly.
 * Always use include/atomic.h which will include either
 * sysdeps/powerpc/powerpc32/atomic-machine.h
 * or
 * sysdeps/powerpc/powerpc64/atomic-machine.h
 * as appropriate and which in turn include this file.
 */
/* Fixed-width, fast, pointer-sized and maximum-sized integer aliases
   used by the generic atomic operations in include/atomic.h.  */
typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
/*
 * Powerpc does not have byte and halfword forms of load and reserve and
 * store conditional.  So for powerpc we stub out the 8- and 16-bit forms.
 * Any use of these forms is a bug, hence the unconditional abort.
 */
#define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
  (abort (), 0)
/* On uniprocessor (UP) builds the acquire/release fences are unnecessary;
   otherwise acquire is "isync" and release defaults to "sync" unless the
   word-size header already chose a lighter release barrier.  */
#ifdef UP
# define __ARCH_ACQ_INSTR	""
# define __ARCH_REL_INSTR	""
#else
# define __ARCH_ACQ_INSTR	"isync"
# ifndef __ARCH_REL_INSTR
#  define __ARCH_REL_INSTR	"sync"
# endif
#endif
/* Lock-acquire/release hints appended to lwarx; empty unless the
   word-size header defined them (ISA 2.05 EH hint).  */
#ifndef MUTEX_HINT_ACQ
# define MUTEX_HINT_ACQ
#endif
#ifndef MUTEX_HINT_REL
# define MUTEX_HINT_REL
#endif
/* Full memory barrier: heavyweight "sync"; the "memory" clobber also
   stops compiler reordering across it.  */
#define atomic_full_barrier()	__asm ("sync" ::: "memory")
/* 32-bit CAS with acquire semantics: lwarx/stwcx. retry loop; returns the
   previous value of *MEM, with __ARCH_ACQ_INSTR after the exchange.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)	      \
  ({									      \
      __typeof (*(mem)) __tmp;						      \
      __typeof (mem)  __memp = (mem);					      \
      __asm __volatile (						      \
		        "1:	lwarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
		        "	cmpw	%0,%2\n"			      \
		        "	bne	2f\n"				      \
		        "	stwcx.	%3,0,%1\n"			      \
		        "	bne-	1b\n"				      \
		        "2:	" __ARCH_ACQ_INSTR			      \
		        : "=&r" (__tmp)					      \
		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
		        : "cr0", "memory");				      \
      __tmp;								      \
  })
/* 32-bit CAS with release semantics: __ARCH_REL_INSTR before the
   lwarx/stwcx. loop; returns the previous value of *MEM.  */
#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval)	      \
  ({									      \
      __typeof (*(mem)) __tmp;						      \
      __typeof (mem)  __memp = (mem);					      \
      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		        "1:	lwarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
		        "	cmpw	%0,%2\n"			      \
		        "	bne	2f\n"				      \
		        "	stwcx.	%3,0,%1\n"			      \
		        "	bne-	1b\n"				      \
		        "2:	"					      \
		        : "=&r" (__tmp)					      \
		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
		        : "cr0", "memory");				      \
      __tmp;								      \
  })
/* Unconditional 32-bit exchange with acquire semantics; returns the
   previous value of *MEM.  */
#define __arch_atomic_exchange_32_acq(mem, value)			      \
  ({									      \
    __typeof (*mem) __val;						      \
    __asm __volatile (							      \
		      "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
		      "		stwcx.	%3,0,%2\n"			      \
		      "		bne-	1b\n"				      \
		      "   " __ARCH_ACQ_INSTR				      \
		      : "=&r" (__val), "=m" (*mem)			      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* Unconditional 32-bit exchange with release semantics; returns the
   previous value of *MEM.  */
#define __arch_atomic_exchange_32_rel(mem, value)			      \
  ({									      \
    __typeof (*mem) __val;						      \
    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		      "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
		      "		stwcx.	%3,0,%2\n"			      \
		      "		bne-	1b"				      \
		      : "=&r" (__val), "=m" (*mem)			      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* 32-bit fetch-and-add, no memory-ordering guarantee; returns the
   previous value of *MEM.  */
#define __arch_atomic_exchange_and_add_32(mem, value)			      \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
		      "		add	%1,%0,%4\n"			      \
		      "		stwcx.	%1,0,%3\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* 32-bit fetch-and-add with acquire semantics; returns the previous
   value of *MEM.  */
#define __arch_atomic_exchange_and_add_32_acq(mem, value)		      \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile ("1:	lwarx	%0,0,%3" MUTEX_HINT_ACQ "\n"	      \
		      "		add	%1,%0,%4\n"			      \
		      "		stwcx.	%1,0,%3\n"			      \
		      "		bne-	1b\n"				      \
		      __ARCH_ACQ_INSTR					      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* 32-bit fetch-and-add with release semantics; returns the previous
   value of *MEM.  */
#define __arch_atomic_exchange_and_add_32_rel(mem, value)		      \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		      "1:	lwarx	%0,0,%3" MUTEX_HINT_REL "\n"	      \
		      "		add	%1,%0,%4\n"			      \
		      "		stwcx.	%1,0,%3\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* Atomically increment the 32-bit *MEM; returns the NEW (incremented)
   value.  */
#define __arch_atomic_increment_val_32(mem)				      \
  ({									      \
    __typeof (*(mem)) __val;						      \
    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
		      "		addi	%0,%0,1\n"			      \
		      "		stwcx.	%0,0,%2\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=m" (*mem)			      \
		      : "b" (mem), "m" (*mem)				      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* Atomically decrement the 32-bit *MEM; returns the NEW (decremented)
   value.  */
#define __arch_atomic_decrement_val_32(mem)				      \
  ({									      \
    __typeof (*(mem)) __val;						      \
    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
		      "		subi	%0,%0,1\n"			      \
		      "		stwcx.	%0,0,%2\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=m" (*mem)			      \
		      : "b" (mem), "m" (*mem)				      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
/* Decrement the 32-bit *MEM only if it is > 0; returns the OLD value.
   Note the result type is plain int, matching the upstream interface.  */
#define __arch_atomic_decrement_if_positive_32(mem)			      \
  ({ int __val, __tmp;							      \
     __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
		       "	cmpwi	0,%0,0\n"			      \
		       "	addi	%1,%0,-1\n"			      \
		       "	ble	2f\n"				      \
		       "	stwcx.	%1,0,%3\n"			      \
		       "	bne-	1b\n"				      \
		       "2:	" __ARCH_ACQ_INSTR			      \
		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		       : "b" (mem), "m" (*mem)				      \
		       : "cr0", "memory");				      \
     __val;								      \
  })
/* Size-dispatched CAS, acquire semantics; returns the previous *MEM.
   The __arch_*_64_* variant is supplied by the including word-size
   header (see the note at the top of this file).  */
#define atomic_compare_and_exchange_val_acq(mem, newval, oldval)	      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_compare_and_exchange_val_64_acq(mem, newval, oldval); \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched CAS, release semantics; returns the previous *MEM.  */
#define atomic_compare_and_exchange_val_rel(mem, newval, oldval)	      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_compare_and_exchange_val_32_rel(mem, newval, oldval); \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_compare_and_exchange_val_64_rel(mem, newval, oldval); \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched unconditional exchange, acquire semantics; returns
   the previous *MEM.  */
#define atomic_exchange_acq(mem, value)					      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_atomic_exchange_32_acq (mem, value);		      \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_atomic_exchange_64_acq (mem, value);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched unconditional exchange, release semantics; returns
   the previous *MEM.  */
#define atomic_exchange_rel(mem, value)					      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_atomic_exchange_32_rel (mem, value);		      \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_atomic_exchange_64_rel (mem, value);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched fetch-and-add, no ordering; returns the previous
   *MEM.  */
#define atomic_exchange_and_add(mem, value)				      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32 (mem, value);	      \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64 (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched fetch-and-add, acquire semantics; returns the
   previous *MEM.  */
#define atomic_exchange_and_add_acq(mem, value)				      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32_acq (mem, value);	      \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64_acq (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched fetch-and-add, release semantics; returns the
   previous *MEM.  */
#define atomic_exchange_and_add_rel(mem, value)				      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32_rel (mem, value);	      \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64_rel (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Size-dispatched increment; returns the NEW value.  atomic_increment
   discards the result.  */
#define atomic_increment_val(mem)					      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_increment_val_32 (mem);			      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_increment_val_64 (mem);			      \
    else								      \
      abort ();								      \
    __result;								      \
  })

#define atomic_increment(mem) ({ atomic_increment_val (mem); (void) 0; })
/* Size-dispatched decrement; returns the NEW value.  atomic_decrement
   discards the result.  */
#define atomic_decrement_val(mem)					      \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_decrement_val_32 (mem);			      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_decrement_val_64 (mem);			      \
    else								      \
      abort ();								      \
    __result;								      \
  })

#define atomic_decrement(mem) ({ atomic_decrement_val (mem); (void) 0; })
/* Decrement *MEM if it is > 0, and return the old value.  Size-dispatched;
   tail (else abort / result) reconstructed — original continues past the
   visible excerpt.  */
#define atomic_decrement_if_positive(mem)				      \
  ({ __typeof (*(mem)) __result;					      \
    if (sizeof (*mem) == 4)						      \
      __result = __arch_atomic_decrement_if_positive_32 (mem);		      \
    else if (sizeof (*mem) == 8)					      \
      __result = __arch_atomic_decrement_if_positive_64 (mem);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })