/* sysdeps/powerpc/atomic-machine.h -- recovered from a garbled web
   extraction; gitweb page chrome removed.  */
/* Atomic operations.  PowerPC Common version.
   Copyright (C) 2003-2021 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */
/*
 * Never include sysdeps/powerpc/atomic-machine.h directly.
 * Always use include/atomic.h which will include either
 * sysdeps/powerpc/powerpc32/atomic-machine.h
 * or
 * sysdeps/powerpc/powerpc64/atomic-machine.h
 * as appropriate and which in turn include this file.
 */
/* Integer typedefs exposed by the old-style glibc atomic API.  These are
   plain integer types; atomicity is provided by the operations below, not
   by the types themselves.  (Extraction had split each typedef across two
   lines and prefixed gitweb line numbers; restored to valid C.)  */
typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
/*
 * Powerpc does not have byte and halfword forms of load and reserve and
 * store conditional.  So for powerpc we stub out the 8- and 16-bit forms:
 * any use of them aborts at run time.
 * (Stub bodies restored from upstream glibc -- verify against original.)
 */
#define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
  (abort (), 0)
/* Barrier instruction mnemonics used by the asm below.  "isync" is the
   acquire-side barrier; "sync" is the default release-side barrier.  A
   platform header may pre-define a lighter __ARCH_REL_INSTR (hence the
   #ifndef).  Missing #endif restored after extraction loss.  */
#define __ARCH_ACQ_INSTR	"isync"
#ifndef __ARCH_REL_INSTR
# define __ARCH_REL_INSTR	"sync"
#endif
/* Optional hint text appended to the lwarx instructions below; empty
   unless a platform header provided a value.  Missing #endif lines
   restored after extraction loss.  */
#ifndef MUTEX_HINT_ACQ
# define MUTEX_HINT_ACQ
#endif
#ifndef MUTEX_HINT_REL
# define MUTEX_HINT_REL
#endif
/* Full memory barrier: the heavyweight "sync" instruction plus a compiler
   "memory" clobber so the compiler does not reorder accesses across it.  */
#define atomic_full_barrier()	__asm ("sync" ::: "memory")
/* 32-bit compare-and-exchange, acquire semantics: load-reserve *MEM,
   compare with OLDVAL, store-conditional NEWVAL, retry if the reservation
   was lost, then run the acquire barrier.  Evaluates to the value that
   was in *MEM before the operation.
   NOTE(review): interior asm lines were lost in extraction; body restored
   from the upstream glibc sources -- verify against the original file.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)           \
  ({                                                                          \
      __typeof (*(mem)) __tmp;                                                \
      __typeof (mem)  __memp = (mem);                                         \
      __asm __volatile (                                                      \
                        "1:     lwarx   %0,0,%1" MUTEX_HINT_ACQ "\n"          \
                        "       cmpw    %0,%2\n"                              \
                        "       bne     2f\n"                                 \
                        "       stwcx.  %3,0,%1\n"                            \
                        "       bne-    1b\n"                                 \
                        "2:     " __ARCH_ACQ_INSTR                            \
                        : "=&r" (__tmp)                                       \
                        : "b" (__memp), "r" (oldval), "r" (newval)            \
                        : "cr0", "memory");                                   \
      __tmp;                                                                  \
  })
/* 32-bit compare-and-exchange, release semantics: release barrier first,
   then the lwarx/stwcx. retry loop.  Evaluates to the previous *MEM value.
   NOTE(review): interior asm lines were lost in extraction; body restored
   from the upstream glibc sources -- verify against the original file.  */
#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval)           \
  ({                                                                          \
      __typeof (*(mem)) __tmp;                                                \
      __typeof (mem)  __memp = (mem);                                         \
      __asm __volatile (__ARCH_REL_INSTR "\n"                                 \
                        "1:     lwarx   %0,0,%1" MUTEX_HINT_REL "\n"          \
                        "       cmpw    %0,%2\n"                              \
                        "       bne     2f\n"                                 \
                        "       stwcx.  %3,0,%1\n"                            \
                        "       bne-    1b\n"                                 \
                        "2:     "                                             \
                        : "=&r" (__tmp)                                       \
                        : "b" (__memp), "r" (oldval), "r" (newval)            \
                        : "cr0", "memory");                                   \
      __tmp;                                                                  \
  })
/* Atomically store VALUE into *MEM (acquire semantics); evaluates to the
   old value.  lwarx/stwcx. loop followed by the acquire barrier.
   NOTE(review): interior asm lines restored from upstream glibc -- verify.  */
#define __arch_atomic_exchange_32_acq(mem, value)                             \
  ({                                                                          \
    __typeof (*mem) __val;                                                    \
    __asm __volatile (                                                        \
                      "1:       lwarx   %0,0,%2" MUTEX_HINT_ACQ "\n"          \
                      "         stwcx.  %3,0,%2\n"                            \
                      "         bne-    1b\n"                                 \
                      "   " __ARCH_ACQ_INSTR                                  \
                      : "=&r" (__val), "=m" (*mem)                            \
                      : "b" (mem), "r" (value), "m" (*mem)                    \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* Atomically store VALUE into *MEM (release semantics); evaluates to the
   old value.  Release barrier first, then the lwarx/stwcx. loop.
   NOTE(review): interior asm lines restored from upstream glibc -- verify.  */
#define __arch_atomic_exchange_32_rel(mem, value)                             \
  ({                                                                          \
    __typeof (*mem) __val;                                                    \
    __asm __volatile (__ARCH_REL_INSTR "\n"                                   \
                      "1:       lwarx   %0,0,%2" MUTEX_HINT_REL "\n"          \
                      "         stwcx.  %3,0,%2\n"                            \
                      "         bne-    1b"                                   \
                      : "=&r" (__val), "=m" (*mem)                            \
                      : "b" (mem), "r" (value), "m" (*mem)                    \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* Atomically add VALUE to *MEM with no ordering guarantee; evaluates to
   the old value.  lwarx / add / stwcx. retry loop.
   NOTE(review): interior asm lines restored from upstream glibc -- verify.  */
#define __arch_atomic_exchange_and_add_32(mem, value)                         \
  ({                                                                          \
    __typeof (*mem) __val, __tmp;                                             \
    __asm __volatile ("1:       lwarx   %0,0,%3\n"                            \
                      "         add     %1,%0,%4\n"                           \
                      "         stwcx.  %1,0,%3\n"                            \
                      "         bne-    1b"                                   \
                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)             \
                      : "b" (mem), "r" (value), "m" (*mem)                    \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* Atomically add VALUE to *MEM (acquire semantics); evaluates to the old
   value.  lwarx / add / stwcx. loop followed by the acquire barrier.
   NOTE(review): interior asm lines (add, branch, barrier) restored from
   upstream glibc -- verify against the original file.  */
#define __arch_atomic_exchange_and_add_32_acq(mem, value)                     \
  ({                                                                          \
    __typeof (*mem) __val, __tmp;                                             \
    __asm __volatile ("1:       lwarx   %0,0,%3" MUTEX_HINT_ACQ "\n"          \
                      "         add     %1,%0,%4\n"                           \
                      "         stwcx.  %1,0,%3\n"                            \
                      "         bne-    1b\n"                                 \
                      __ARCH_ACQ_INSTR                                        \
                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)             \
                      : "b" (mem), "r" (value), "m" (*mem)                    \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* Atomically add VALUE to *MEM (release semantics); evaluates to the old
   value.  Release barrier first, then the lwarx / add / stwcx. loop.
   NOTE(review): interior asm lines restored from upstream glibc -- verify.  */
#define __arch_atomic_exchange_and_add_32_rel(mem, value)                     \
  ({                                                                          \
    __typeof (*mem) __val, __tmp;                                             \
    __asm __volatile (__ARCH_REL_INSTR "\n"                                   \
                      "1:       lwarx   %0,0,%3" MUTEX_HINT_REL "\n"          \
                      "         add     %1,%0,%4\n"                           \
                      "         stwcx.  %1,0,%3\n"                            \
                      "         bne-    1b"                                   \
                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)             \
                      : "b" (mem), "r" (value), "m" (*mem)                    \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* Atomically increment *MEM; evaluates to the NEW (incremented) value --
   note %0 holds the added result when stored.
   NOTE(review): interior asm lines restored from upstream glibc -- verify.  */
#define __arch_atomic_increment_val_32(mem)                                   \
  ({                                                                          \
    __typeof (*(mem)) __val;                                                  \
    __asm __volatile ("1:       lwarx   %0,0,%2\n"                            \
                      "         addi    %0,%0,1\n"                            \
                      "         stwcx.  %0,0,%2\n"                            \
                      "         bne-    1b"                                   \
                      : "=&b" (__val), "=m" (*mem)                            \
                      : "b" (mem), "m" (*mem)                                 \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* Atomically decrement *MEM; evaluates to the NEW (decremented) value.
   NOTE(review): interior asm lines (the subtract and the retry branch)
   restored from upstream glibc -- verify against the original file.  */
#define __arch_atomic_decrement_val_32(mem)                                   \
  ({                                                                          \
    __typeof (*(mem)) __val;                                                  \
    __asm __volatile ("1:       lwarx   %0,0,%2\n"                            \
                      "         subi    %0,%0,1\n"                            \
                      "         stwcx.  %0,0,%2\n"                            \
                      "         bne-    1b"                                   \
                      : "=&b" (__val), "=m" (*mem)                            \
                      : "b" (mem), "m" (*mem)                                 \
                      : "cr0", "memory");                                     \
    __val;                                                                    \
  })
/* If *MEM > 0, atomically decrement it; evaluates to the OLD value either
   way.  Skips the store (branch to 2:) when the loaded value is <= 0,
   then runs the acquire barrier.
   NOTE(review): most interior asm lines (compare, subtract, branch)
   restored from upstream glibc -- verify against the original file.  */
#define __arch_atomic_decrement_if_positive_32(mem)                           \
  ({ int __val, __tmp;                                                        \
     __asm __volatile ("1:      lwarx   %0,0,%3\n"                            \
                       "        cmpwi   0,%0,0\n"                             \
                       "        addi    %1,%0,-1\n"                           \
                       "        ble     2f\n"                                 \
                       "        stwcx.  %1,0,%3\n"                            \
                       "        bne-    1b\n"                                 \
                       "2:      " __ARCH_ACQ_INSTR                            \
                       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)            \
                       : "b" (mem), "m" (*mem)                                \
                       : "cr0", "memory");                                    \
     __val;                                                                   \
  })
/* Size-dispatched compare-and-exchange, acquire semantics: selects the
   32- or 64-bit arch primitive by operand size, aborts otherwise.
   Evaluates to the old value of *MEM.  (Tail lines restored after
   extraction loss.)  */
#define atomic_compare_and_exchange_val_acq(mem, newval, oldval)              \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_compare_and_exchange_val_64_acq(mem, newval, oldval); \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched compare-and-exchange, release semantics: selects the
   32- or 64-bit arch primitive by operand size, aborts otherwise.
   Evaluates to the old value of *MEM.  (Tail lines restored after
   extraction loss.)  */
#define atomic_compare_and_exchange_val_rel(mem, newval, oldval)              \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_compare_and_exchange_val_32_rel(mem, newval, oldval); \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_compare_and_exchange_val_64_rel(mem, newval, oldval); \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched atomic exchange, acquire semantics; evaluates to the
   old value of *MEM.  Aborts for sizes other than 4 and 8.  (Tail lines
   restored after extraction loss.)  */
#define atomic_exchange_acq(mem, value)                                       \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_atomic_exchange_32_acq (mem, value);                  \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_atomic_exchange_64_acq (mem, value);                  \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched atomic exchange, release semantics; evaluates to the
   old value of *MEM.  Aborts for sizes other than 4 and 8.  (Tail lines
   restored after extraction loss.)  */
#define atomic_exchange_rel(mem, value)                                       \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_atomic_exchange_32_rel (mem, value);                  \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_atomic_exchange_64_rel (mem, value);                  \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched fetch-and-add with no ordering guarantee; evaluates to
   the old value of *MEM.  Aborts for sizes other than 4 and 8.  (Tail
   lines restored after extraction loss.)  */
#define atomic_exchange_and_add(mem, value)                                   \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_atomic_exchange_and_add_32 (mem, value);              \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_atomic_exchange_and_add_64 (mem, value);              \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched fetch-and-add, acquire semantics; evaluates to the old
   value of *MEM.  Aborts for sizes other than 4 and 8.  (Tail lines
   restored after extraction loss.)  */
#define atomic_exchange_and_add_acq(mem, value)                               \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_atomic_exchange_and_add_32_acq (mem, value);          \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_atomic_exchange_and_add_64_acq (mem, value);          \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched fetch-and-add, release semantics; evaluates to the old
   value of *MEM.  Aborts for sizes other than 4 and 8.  (Tail lines
   restored after extraction loss.)  */
#define atomic_exchange_and_add_rel(mem, value)                               \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*mem) == 4)                                                   \
      __result = __arch_atomic_exchange_and_add_32_rel (mem, value);          \
    else if (sizeof (*mem) == 8)                                              \
      __result = __arch_atomic_exchange_and_add_64_rel (mem, value);          \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Size-dispatched atomic increment; evaluates to the NEW value of *MEM.
   Aborts for sizes other than 4 and 8.  (Tail lines restored after
   extraction loss.)  */
#define atomic_increment_val(mem)                                             \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*(mem)) == 4)                                                 \
      __result = __arch_atomic_increment_val_32 (mem);                        \
    else if (sizeof (*(mem)) == 8)                                            \
      __result = __arch_atomic_increment_val_64 (mem);                        \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Atomically increment *MEM, discarding the resulting value.  */
#define atomic_increment(mem) ({ atomic_increment_val (mem); (void) 0; })
/* Size-dispatched atomic decrement; evaluates to the NEW value of *MEM.
   Aborts for sizes other than 4 and 8.  (Tail lines restored after
   extraction loss.)  */
#define atomic_decrement_val(mem)                                             \
  ({                                                                          \
    __typeof (*(mem)) __result;                                               \
    if (sizeof (*(mem)) == 4)                                                 \
      __result = __arch_atomic_decrement_val_32 (mem);                        \
    else if (sizeof (*(mem)) == 8)                                            \
      __result = __arch_atomic_decrement_val_64 (mem);                        \
    else                                                                      \
      abort ();                                                               \
    __result;                                                                 \
  })
/* Atomically decrement *MEM, discarding the resulting value.  */
#define atomic_decrement(mem) ({ atomic_decrement_val (mem); (void) 0; })
/* Decrement *MEM if it is > 0, and return the old value.  */
325 #define atomic_decrement_if_positive(mem) \
326 ({ __typeof (*(mem)) __result; \
327 if (sizeof (*mem) == 4) \
328 __result = __arch_atomic_decrement_if_positive_32 (mem); \
329 else if (sizeof (*mem) == 8) \
330 __result = __arch_atomic_decrement_if_positive_64 (mem); \