/* Atomic operations.  PowerPC version.
   Copyright (C) 2003 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */
#include <stdint.h>

/* Fixed-width and fast integer flavors used by the generic atomic
   framework.  Each width gets a signed/unsigned exact-size pair and a
   signed/unsigned "fast" pair.  */

typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

/* Pointer-sized and maximum-width atomic types.  */
typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
/* 8- and 16-bit compare-and-exchange are not provided on this port
   (no byte/halfword load-and-reserve primitives are used here); any
   call aborts at runtime.  The trailing 0 keeps the expression typed
   for the generic dispatch macros.  */
#define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
  (abort (), 0)
/* Barrier instructions inserted into the asm templates below.  On a
   uniprocessor (UP) build no ordering instructions are needed; on SMP,
   "isync" provides acquire semantics after a successful reservation and
   "sync" provides release semantics before it.  */
#ifdef UP
# define __ARCH_ACQ_INSTR	""
# define __ARCH_REL_INSTR	""
#else
# define __ARCH_ACQ_INSTR	"isync"
# define __ARCH_REL_INSTR	"sync"
#endif
/*
 * XXX At present these have both acquire and release semantics.
 * Ultimately we should do separate _acq and _rel versions.
 */
#ifdef __powerpc64__

/*
 * The 32-bit exchange_bool is different on powerpc64 because the subf
 * does signed 64-bit arithmetic while the lwarx is 32-bit unsigned
 * (a load word and zero (high 32) form).
 * In powerpc64 register values are 64-bit by default, including oldval.
 * Hence we need to sign-extend the result of the lwarx to 64 bits so
 * the 64-bit subtract from it gives the expected result and sets the
 * condition correctly.
 */
# define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
({ \
  unsigned int __tmp; \
  __asm __volatile (__ARCH_REL_INSTR "\n" \
		    "1:	lwarx	%0,0,%1\n" \
		    "	extsw	%0,%0\n" \
		    "	subf.	%0,%2,%0\n" \
		    "	bne	2f\n" \
		    "	stwcx.	%3,0,%1\n" \
		    "	bne-	1b\n" \
		    "2:	" __ARCH_ACQ_INSTR \
		    : "=&r" (__tmp) \
		    : "b" (mem), "r" (oldval), "r" (newval) \
		    : "cr0", "memory"); \
  __tmp != 0; \
})

/* Returns zero iff *MEM was equal to OLDVAL and NEWVAL was stored;
   the ldarx/stdcx. pair retries on reservation loss.  */
# define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
({ \
  unsigned long __tmp; \
  __asm __volatile (__ARCH_REL_INSTR "\n" \
		    "1:	ldarx	%0,0,%1\n" \
		    "	subf.	%0,%2,%0\n" \
		    "	bne	2f\n" \
		    "	stdcx.	%3,0,%1\n" \
		    "	bne-	1b\n" \
		    "2:	" __ARCH_ACQ_INSTR \
		    : "=&r" (__tmp) \
		    : "b" (mem), "r" (oldval), "r" (newval) \
		    : "cr0", "memory"); \
  __tmp != 0; \
})

/* Store VALUE into *MEM atomically and return the previous value.  */
# define __arch_atomic_exchange_64(mem, value) \
    ({ \
      __typeof (*mem) __val; \
      __asm __volatile (__ARCH_REL_INSTR "\n" \
			"1:	ldarx	%0,0,%2\n" \
			"	stdcx.	%3,0,%2\n" \
			"	bne-	1b" \
			: "=&r" (__val), "=m" (*mem) \
			: "b" (mem), "r" (value), "1" (*mem) \
			: "cr0"); \
      __val; \
    })

/* Add VALUE to *MEM atomically and return the previous value.  */
# define __arch_atomic_exchange_and_add_64(mem, value) \
    ({ \
      __typeof (*mem) __val, __tmp; \
      __asm __volatile ("1:	ldarx	%0,0,%3\n" \
			"	add	%1,%0,%4\n" \
			"	stdcx.	%1,0,%3\n" \
			"	bne-	1b" \
			: "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
			: "b" (mem), "r" (value), "2" (*mem) \
			: "cr0"); \
      __val; \
    })

/* Decrement *MEM if it is > 0 and return the previous value; leaves
   *MEM untouched when it is <= 0.
   NOTE(review): __val/__tmp are declared int although this is the
   64-bit variant, so the ldarx result is truncated on output -- confirm
   intent; later implementations use the pointed-to type here.  */
# define __arch_atomic_decrement_if_positive_64(mem) \
  ({ int __val, __tmp; \
     __asm __volatile ("1:	ldarx	%0,0,%3\n" \
		       "	cmpdi	0,%0,0\n" \
		       "	addi	%1,%0,-1\n" \
		       "	ble	2f\n" \
		       "	stdcx.	%1,0,%3\n" \
		       "	bne-	1b\n" \
		       "2:	" __ARCH_ACQ_INSTR \
		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
		       : "b" (mem), "2" (*mem) \
		       : "cr0"); \
     __val; \
  })

#else /* powerpc32 */

/* Plain 32-bit CAS; no sign extension needed since registers are
   32 bits wide on powerpc32.  */
# define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
({ \
  unsigned int __tmp; \
  __asm __volatile (__ARCH_REL_INSTR "\n" \
		    "1:	lwarx	%0,0,%1\n" \
		    "	subf.	%0,%2,%0\n" \
		    "	bne	2f\n" \
		    "	stwcx.	%3,0,%1\n" \
		    "	bne-	1b\n" \
		    "2:	" __ARCH_ACQ_INSTR \
		    : "=&r" (__tmp) \
		    : "b" (mem), "r" (oldval), "r" (newval) \
		    : "cr0", "memory"); \
  __tmp != 0; \
})

/* 64-bit operations are unsupported on powerpc32; abort at runtime.
   The dummy expressions after abort () only keep the macros typed.  */
# define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
  (abort (), 0)

# define __arch_atomic_exchange_64(mem, value) \
    ({ abort (); (*mem) = (value); })
# define __arch_atomic_exchange_and_add_64(mem, value) \
    ({ abort (); (*mem) = (value); })
# define __arch_atomic_decrement_if_positive_64(mem) \
    ({ abort (); (*mem)--; })
#endif

/* Store VALUE into *MEM atomically and return the previous value
   (32-bit operands).  The lwarx/stwcx. pair retries until the
   reservation succeeds; __ARCH_REL_INSTR gives release ordering.  */
#define __arch_atomic_exchange_32(mem, value) \
  ({ \
    __typeof (*mem) __val; \
    __asm __volatile (__ARCH_REL_INSTR "\n" \
		      "1:	lwarx	%0,0,%2\n" \
		      "	stwcx.	%3,0,%2\n" \
		      "	bne-	1b" \
		      : "=&r" (__val), "=m" (*mem) \
		      : "b" (mem), "r" (value), "1" (*mem) \
		      : "cr0"); \
    __val; \
  })

/* Add VALUE to *MEM atomically and return the previous value
   (32-bit operands).  */
#define __arch_atomic_exchange_and_add_32(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile ("1:	lwarx	%0,0,%3\n" \
		      "	add	%1,%0,%4\n" \
		      "	stwcx.	%1,0,%3\n" \
		      "	bne-	1b" \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
		      : "b" (mem), "r" (value), "2" (*mem) \
		      : "cr0"); \
    __val; \
  })

/* Decrement *MEM if it is > 0 and return the previous value; leaves
   *MEM untouched when it is <= 0 (32-bit operands).  */
#define __arch_atomic_decrement_if_positive_32(mem) \
  ({ int __val, __tmp; \
     __asm __volatile ("1:	lwarx	%0,0,%3\n" \
		       "	cmpwi	0,%0,0\n" \
		       "	addi	%1,%0,-1\n" \
		       "	ble	2f\n" \
		       "	stwcx.	%1,0,%3\n" \
		       "	bne-	1b\n" \
		       "2:	" __ARCH_ACQ_INSTR \
		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
		       : "b" (mem), "2" (*mem) \
		       : "cr0"); \
     __val; \
  })


/* Store VALUE into *MEM atomically and return the old value,
   dispatching on the operand size.  Sizes other than 4 and 8 abort.  */
#define atomic_exchange(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_32 ((mem), (value)); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_64 ((mem), (value)); \
    else \
       abort (); \
    __result; \
  })

/* Add VALUE to *MEM atomically and return the old value, dispatching
   on the operand size.  Sizes other than 4 and 8 abort.  */
#define atomic_exchange_and_add(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32 ((mem), (value)); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64 ((mem), (value)); \
    else \
       abort (); \
    __result; \
  })


/* Decrement *MEM if it is > 0, and return the old value, dispatching
   on the operand size.  Sizes other than 4 and 8 abort.  */
#define atomic_decrement_if_positive(mem) \
  ({ __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_decrement_if_positive_32 (mem); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_decrement_if_positive_64 (mem); \
    else \
       abort (); \
    __result; \
  })


/* Memory barriers.  "sync" orders all storage accesses; powerpc64 can
   use the lighter "lwsync" for a read barrier; "eieio" orders stores.  */
#define atomic_full_barrier()	__asm ("sync" ::: "memory")
#ifdef __powerpc64__
# define atomic_read_barrier()	__asm ("lwsync" ::: "memory")
#else
# define atomic_read_barrier()	__asm ("sync" ::: "memory")
#endif
#define atomic_write_barrier()	__asm ("eieio" ::: "memory")