/* Atomic operations.  PowerPC Common version.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

/*
 * Never include sysdeps/powerpc/atomic-machine.h directly.
 * Always use include/atomic.h, which will include either
 * sysdeps/powerpc/powerpc32/atomic-machine.h
 * or
 * sysdeps/powerpc/powerpc64/atomic-machine.h
 * as appropriate, and which in turn includes this file.
 */

#include <stdint.h>

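/* Fixed-width and "fast" atomic integer types used by the generic
   include/atomic.h machinery.  */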
typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;

/*
 * PowerPC does not have byte and halfword forms of load and reserve and
 * store conditional.  So for powerpc we stub out the 8- and 16-bit forms.
 */
#define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
  (abort (), 0)

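/* On uniprocessor (UP) builds the barriers are unnecessary and are
   elided.  Otherwise isync after a successful load-and-reserve serves
   as the acquire barrier, and sync before the operation serves as the
   release barrier; the #ifndef guard lets a per-CPU header override
   __ARCH_REL_INSTR (powerpc64 uses the lighter lwsync).  */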
#ifdef UP
# define __ARCH_ACQ_INSTR ""
# define __ARCH_REL_INSTR ""
#else
# define __ARCH_ACQ_INSTR "isync"
# ifndef __ARCH_REL_INSTR
#  define __ARCH_REL_INSTR "sync"
# endif
#endif

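/* MUTEX_HINT_ACQ and MUTEX_HINT_REL may be predefined by the per-CPU
   headers to append the lock-acquisition hint operand to lwarx on
   processors that support it; by default no hint is used.  */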
#ifndef MUTEX_HINT_ACQ
# define MUTEX_HINT_ACQ
#endif
#ifndef MUTEX_HINT_REL
# define MUTEX_HINT_REL
#endif

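/* sync is the strongest PowerPC memory barrier; it orders all prior
   loads and stores before all subsequent ones.  */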
#define atomic_full_barrier() __asm ("sync" ::: "memory")

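/* Compare and exchange, returning the old value of *MEM: lwarx loads
   and reserves, and if the value equals OLDVAL, stwcx. stores NEWVAL
   conditionally, branching back if the reservation was lost.  The
   acquire form ends with isync at the exit label.  */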
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({ \
    __typeof (*(mem)) __tmp; \
    __typeof (mem) __memp = (mem); \
    __asm __volatile ( \
      "1: lwarx   %0,0,%1" MUTEX_HINT_ACQ "\n" \
      "   cmpw    %0,%2\n" \
      "   bne     2f\n" \
      "   stwcx.  %3,0,%1\n" \
      "   bne-    1b\n" \
      "2: " __ARCH_ACQ_INSTR \
      : "=&r" (__tmp) \
      : "b" (__memp), "r" (oldval), "r" (newval) \
      : "cr0", "memory"); \
    __tmp; \
  })

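/* The release form issues the release barrier before the lwarx/stwcx.
   loop and needs no trailing barrier.  */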
#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval) \
  ({ \
    __typeof (*(mem)) __tmp; \
    __typeof (mem) __memp = (mem); \
    __asm __volatile (__ARCH_REL_INSTR "\n" \
      "1: lwarx   %0,0,%1" MUTEX_HINT_REL "\n" \
      "   cmpw    %0,%2\n" \
      "   bne     2f\n" \
      "   stwcx.  %3,0,%1\n" \
      "   bne-    1b\n" \
      "2: " \
      : "=&r" (__tmp) \
      : "b" (__memp), "r" (oldval), "r" (newval) \
      : "cr0", "memory"); \
    __tmp; \
  })

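/* Unconditional exchange: store VALUE into *MEM and return its previous
   contents.  The acquire form ends with isync; the release form instead
   begins with the release barrier.  */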
#define __arch_atomic_exchange_32_acq(mem, value) \
  ({ \
    __typeof (*mem) __val; \
    __asm __volatile ( \
      "1: lwarx   %0,0,%2" MUTEX_HINT_ACQ "\n" \
      "   stwcx.  %3,0,%2\n" \
      "   bne-    1b\n" \
      "   " __ARCH_ACQ_INSTR \
      : "=&r" (__val), "=m" (*mem) \
      : "b" (mem), "r" (value), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_32_rel(mem, value) \
  ({ \
    __typeof (*mem) __val; \
    __asm __volatile (__ARCH_REL_INSTR "\n" \
      "1: lwarx   %0,0,%2" MUTEX_HINT_REL "\n" \
      "   stwcx.  %3,0,%2\n" \
      "   bne-    1b" \
      : "=&r" (__val), "=m" (*mem) \
      : "b" (mem), "r" (value), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

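/* Fetch-and-add: add VALUE to *MEM and return the value it held before
   the addition.  The plain form imposes no ordering; the _acq and _rel
   forms add the corresponding barrier.  */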
#define __arch_atomic_exchange_and_add_32(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile ("1: lwarx   %0,0,%3\n" \
      "   add     %1,%0,%4\n" \
      "   stwcx.  %1,0,%3\n" \
      "   bne-    1b" \
      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
      : "b" (mem), "r" (value), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_and_add_32_acq(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile ("1: lwarx   %0,0,%3" MUTEX_HINT_ACQ "\n" \
      "   add     %1,%0,%4\n" \
      "   stwcx.  %1,0,%3\n" \
      "   bne-    1b\n" \
      __ARCH_ACQ_INSTR \
      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
      : "b" (mem), "r" (value), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_and_add_32_rel(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile (__ARCH_REL_INSTR "\n" \
      "1: lwarx   %0,0,%3" MUTEX_HINT_REL "\n" \
      "   add     %1,%0,%4\n" \
      "   stwcx.  %1,0,%3\n" \
      "   bne-    1b" \
      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
      : "b" (mem), "r" (value), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

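/* Increment or decrement *MEM by one.  Unlike the fetch-and-add forms
   above, these return the new value.  */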
#define __arch_atomic_increment_val_32(mem) \
  ({ \
    __typeof (*(mem)) __val; \
    __asm __volatile ("1: lwarx   %0,0,%2\n" \
      "   addi    %0,%0,1\n" \
      "   stwcx.  %0,0,%2\n" \
      "   bne-    1b" \
      : "=&b" (__val), "=m" (*mem) \
      : "b" (mem), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_decrement_val_32(mem) \
  ({ \
    __typeof (*(mem)) __val; \
    __asm __volatile ("1: lwarx   %0,0,%2\n" \
      "   subi    %0,%0,1\n" \
      "   stwcx.  %0,0,%2\n" \
      "   bne-    1b" \
      : "=&b" (__val), "=m" (*mem) \
      : "b" (mem), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

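/* Decrement *MEM only if it is positive, returning the old value in
   either case; when the old value is <= 0 the store is skipped and
   *MEM is left unchanged.  */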
#define __arch_atomic_decrement_if_positive_32(mem) \
  ({ int __val, __tmp; \
    __asm __volatile ("1: lwarx   %0,0,%3\n" \
      "   cmpwi   0,%0,0\n" \
      "   addi    %1,%0,-1\n" \
      "   ble     2f\n" \
      "   stwcx.  %1,0,%3\n" \
      "   bne-    1b\n" \
      "2: " __ARCH_ACQ_INSTR \
      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
      : "b" (mem), "m" (*mem) \
      : "cr0", "memory"); \
    __val; \
  })

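/* The generic macros below dispatch on the operand size.  The 64-bit
   __arch_*_64_* variants come from the including header: the powerpc64
   version implements them with ldarx/stdcx., while the powerpc32
   version stubs them out to abort, since 32-bit processors lack the
   doubleword forms.  */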
#define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_compare_and_exchange_val_32_acq (mem, newval, oldval); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_compare_and_exchange_val_64_acq (mem, newval, oldval); \
    else \
      abort (); \
    __result; \
  })

#define atomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_compare_and_exchange_val_32_rel (mem, newval, oldval); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_compare_and_exchange_val_64_rel (mem, newval, oldval); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_acq(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_32_acq (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_64_acq (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_rel(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_32_rel (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_64_rel (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_and_add(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32 (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64 (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_and_add_acq(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32_acq (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64_acq (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_and_add_rel(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32_rel (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64_rel (mem, value); \
    else \
      abort (); \
    __result; \
  })
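
/* For example (illustrative only): with int32_t __v = 1,
   atomic_exchange_and_add (&__v, 2) returns 1 and leaves __v == 3.  */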

#define atomic_increment_val(mem) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*(mem)) == 4) \
      __result = __arch_atomic_increment_val_32 (mem); \
    else if (sizeof (*(mem)) == 8) \
      __result = __arch_atomic_increment_val_64 (mem); \
    else \
      abort (); \
    __result; \
  })

#define atomic_increment(mem) ({ atomic_increment_val (mem); (void) 0; })

#define atomic_decrement_val(mem) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*(mem)) == 4) \
      __result = __arch_atomic_decrement_val_32 (mem); \
    else if (sizeof (*(mem)) == 8) \
      __result = __arch_atomic_decrement_val_64 (mem); \
    else \
      abort (); \
    __result; \
  })

#define atomic_decrement(mem) ({ atomic_decrement_val (mem); (void) 0; })


/* Decrement *MEM if it is > 0, and return the old value.  */
#define atomic_decrement_if_positive(mem) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_decrement_if_positive_32 (mem); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_decrement_if_positive_64 (mem); \
    else \
      abort (); \
    __result; \
  })