/* Copyright (C) 2002, 2003, 2004, 2006 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */

#include <stdint.h>


typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;

/* On uniprocessor (UP) builds the lock prefix is unnecessary: with no
   second processor, a single read-modify-write instruction already
   executes atomically with respect to everything that can observe it.  */
#ifndef LOCK_PREFIX
# ifdef UP
#  define LOCK_PREFIX	/* nothing */
# else
#  define LOCK_PREFIX "lock;"
# endif
#endif


#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm __volatile (LOCK_PREFIX "cmpxchgb %b2, %1" \
                       : "=a" (ret), "=m" (*mem) \
                       : "q" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm __volatile (LOCK_PREFIX "cmpxchgw %w2, %1" \
                       : "=a" (ret), "=m" (*mem) \
                       : "r" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm __volatile (LOCK_PREFIX "cmpxchgl %2, %1" \
                       : "=a" (ret), "=m" (*mem) \
                       : "r" (newval), "m" (*mem), "0" (oldval)); \
     ret; })

#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
  ({ __typeof (*mem) ret; \
     __asm __volatile (LOCK_PREFIX "cmpxchgq %q2, %1" \
                       : "=a" (ret), "=m" (*mem) \
                       : "r" ((long) (newval)), "m" (*mem), \
                         "0" ((long) (oldval))); \
     ret; })

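/* Usage sketch (an addition, not in the original header): the _val_
   compare-and-exchange macros return the value found in memory, so the
   exchange succeeded iff the return value equals OLDVAL.  A typical
   retry loop, with a hypothetical counter `cnt':

     atomic32_t cnt;

     atomic32_t old, seen;
     do
       {
         old = cnt;
         seen = __arch_compare_and_exchange_val_32_acq (&cnt, old + 1, old);
       }
     while (seen != old);

   On failure `seen' holds the value that beat us, so the loop simply
   retries with fresh input.  */
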
/* Note that we need no lock prefix: on x86, XCHG with a memory operand
   asserts the bus lock implicitly.  */
#define atomic_exchange_acq(mem, newvalue) \
  ({ __typeof (*mem) result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile ("xchgb %b0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" (newvalue), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile ("xchgw %w0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" (newvalue), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile ("xchgl %0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" (newvalue), "m" (*mem)); \
     else \
       __asm __volatile ("xchgq %q0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" ((long) (newvalue)), "m" (*mem)); \
     result; })

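/* Usage sketch (an addition): since the exchange returns the previous
   contents, a consumer can atomically claim whatever a producer left
   behind.  With a hypothetical mailbox pointer:

     static void *mailbox;

     void *item = atomic_exchange_acq (&mailbox, NULL);
     if (item != NULL)
       process (item);

   Here `process' is assumed for illustration, not part of this header.  */
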
#define atomic_exchange_and_add(mem, value) \
  ({ __typeof (*mem) result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile (LOCK_PREFIX "xaddb %b0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile (LOCK_PREFIX "xaddw %w0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile (LOCK_PREFIX "xaddl %0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" (value), "m" (*mem)); \
     else \
       __asm __volatile (LOCK_PREFIX "xaddq %q0, %1" \
                         : "=r" (result), "=m" (*mem) \
                         : "0" ((long) (value)), "m" (*mem)); \
     result; })

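/* Usage sketch (an addition): XADD returns the value the word held before
   the addition, i.e. this is a fetch-and-add.  That makes handing out
   unique tickets a one-liner, with a hypothetical counter:

     static atomic64_t next_ticket;

     atomic64_t my_ticket = atomic_exchange_and_add (&next_ticket, 1);

   Two threads can never observe the same ticket value.  */
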
/* Special-case constant +1 and -1 so the shorter INC/DEC forms defined
   below can be used.  (The second test was `(value) == 1' here, which
   duplicated the first and made the decrement branch unreachable.)  */
#define atomic_add(mem, value) \
  (void) ({ if (__builtin_constant_p (value) && (value) == 1) \
              atomic_increment (mem); \
            else if (__builtin_constant_p (value) && (value) == -1) \
              atomic_decrement (mem); \
            else if (sizeof (*mem) == 1) \
              __asm __volatile (LOCK_PREFIX "addb %b1, %0" \
                                : "=m" (*mem) \
                                : "ir" (value), "m" (*mem)); \
            else if (sizeof (*mem) == 2) \
              __asm __volatile (LOCK_PREFIX "addw %w1, %0" \
                                : "=m" (*mem) \
                                : "ir" (value), "m" (*mem)); \
            else if (sizeof (*mem) == 4) \
              __asm __volatile (LOCK_PREFIX "addl %1, %0" \
                                : "=m" (*mem) \
                                : "ir" (value), "m" (*mem)); \
            else \
              __asm __volatile (LOCK_PREFIX "addq %q1, %0" \
                                : "=m" (*mem) \
                                : "ir" ((long) (value)), "m" (*mem)); \
            })

#define atomic_add_negative(mem, value) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile (LOCK_PREFIX "addb %b2, %0; sets %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile (LOCK_PREFIX "addw %w2, %0; sets %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile (LOCK_PREFIX "addl %2, %0; sets %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" (value), "m" (*mem)); \
     else \
       __asm __volatile (LOCK_PREFIX "addq %q2, %0; sets %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" ((long) (value)), "m" (*mem)); \
     __result; })

#define atomic_add_zero(mem, value) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile (LOCK_PREFIX "addb %b2, %0; setz %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile (LOCK_PREFIX "addw %w2, %0; setz %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" (value), "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile (LOCK_PREFIX "addl %2, %0; setz %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" (value), "m" (*mem)); \
     else \
       __asm __volatile (LOCK_PREFIX "addq %q2, %0; setz %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "ir" ((long) (value)), "m" (*mem)); \
     __result; })

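/* Usage sketch (an addition): atomic_add_negative and atomic_add_zero
   fold the test into the locked ADD by capturing the sign/zero flag with
   SETS/SETZ, so the word is never re-read.  For example, waking waiters
   once a hypothetical pending count drains to zero:

     static atomic32_t pending;

     if (atomic_add_zero (&pending, -1))
       wake_waiters ();

   `wake_waiters' is assumed for illustration only.  */
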
#define atomic_increment(mem) \
  do { \
    if (sizeof (*mem) == 1) \
      __asm __volatile (LOCK_PREFIX "incb %b0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
    else if (sizeof (*mem) == 2) \
      __asm __volatile (LOCK_PREFIX "incw %w0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
    else if (sizeof (*mem) == 4) \
      __asm __volatile (LOCK_PREFIX "incl %0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
    else \
      __asm __volatile (LOCK_PREFIX "incq %q0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
  } while (0)

#define atomic_increment_and_test(mem) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile (LOCK_PREFIX "incb %b0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile (LOCK_PREFIX "incw %w0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile (LOCK_PREFIX "incl %0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     else \
       __asm __volatile (LOCK_PREFIX "incq %q0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     __result; })

#define atomic_decrement(mem) \
  do { \
    if (sizeof (*mem) == 1) \
      __asm __volatile (LOCK_PREFIX "decb %b0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
    else if (sizeof (*mem) == 2) \
      __asm __volatile (LOCK_PREFIX "decw %w0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
    else if (sizeof (*mem) == 4) \
      __asm __volatile (LOCK_PREFIX "decl %0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
    else \
      __asm __volatile (LOCK_PREFIX "decq %q0" \
                        : "=m" (*mem) \
                        : "m" (*mem)); \
  } while (0)

#define atomic_decrement_and_test(mem) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile (LOCK_PREFIX "decb %b0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile (LOCK_PREFIX "decw %w0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile (LOCK_PREFIX "decl %0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     else \
       __asm __volatile (LOCK_PREFIX "decq %q0; sete %1" \
                         : "=m" (*mem), "=qm" (__result) \
                         : "m" (*mem)); \
     __result; })

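/* Usage sketch (an addition): atomic_decrement_and_test returns nonzero
   exactly when the decrement leaves zero, which is the classic
   reference-count release pattern.  With a hypothetical refcounted
   object and destructor:

     struct obj { atomic32_t refcnt; };

     void
     obj_release (struct obj *o)
     {
       if (atomic_decrement_and_test (&o->refcnt))
         free_obj (o);
     }

   Only the thread that drops the count to zero calls `free_obj', which
   is assumed here for illustration.  */
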
#define atomic_bit_set(mem, bit) \
  do { \
    if (sizeof (*mem) == 1) \
      __asm __volatile (LOCK_PREFIX "orb %b2, %0" \
                        : "=m" (*mem) \
                        : "m" (*mem), "ir" (1L << (bit))); \
    else if (sizeof (*mem) == 2) \
      __asm __volatile (LOCK_PREFIX "orw %w2, %0" \
                        : "=m" (*mem) \
                        : "m" (*mem), "ir" (1L << (bit))); \
    else if (sizeof (*mem) == 4) \
      __asm __volatile (LOCK_PREFIX "orl %2, %0" \
                        : "=m" (*mem) \
                        : "m" (*mem), "ir" (1L << (bit))); \
    else if (__builtin_constant_p (bit) && (bit) < 32) \
      __asm __volatile (LOCK_PREFIX "orq %2, %0" \
                        : "=m" (*mem) \
                        : "m" (*mem), "i" (1L << (bit))); \
    else \
      __asm __volatile (LOCK_PREFIX "orq %q2, %0" \
                        : "=m" (*mem) \
                        : "m" (*mem), "r" (1UL << (bit))); \
  } while (0)

#define atomic_bit_test_set(mem, bit) \
  ({ unsigned char __result; \
     if (sizeof (*mem) == 1) \
       __asm __volatile (LOCK_PREFIX "btsb %3, %1; setc %0" \
                         : "=q" (__result), "=m" (*mem) \
                         : "m" (*mem), "ir" (bit)); \
     else if (sizeof (*mem) == 2) \
       __asm __volatile (LOCK_PREFIX "btsw %3, %1; setc %0" \
                         : "=q" (__result), "=m" (*mem) \
                         : "m" (*mem), "ir" (bit)); \
     else if (sizeof (*mem) == 4) \
       __asm __volatile (LOCK_PREFIX "btsl %3, %1; setc %0" \
                         : "=q" (__result), "=m" (*mem) \
                         : "m" (*mem), "ir" (bit)); \
     else \
       __asm __volatile (LOCK_PREFIX "btsq %3, %1; setc %0" \
                         : "=q" (__result), "=m" (*mem) \
                         : "m" (*mem), "ir" (bit)); \
     __result; })

/* "rep; nop" is the encoding of the PAUSE instruction, which tells the
   CPU it is inside a spin-wait loop.  */
#define atomic_delay() asm ("rep; nop")

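/* Usage sketch (an addition): atomic_bit_test_set above plus atomic_delay
   give a minimal test-and-set spin lock on bit 0 of a hypothetical lock
   word, where 0 means free:

     static int lock;

     while (atomic_bit_test_set (&lock, 0))
       atomic_delay ();
     ... critical section ...
     lock = 0;

   The plain store suffices to release on x86-64, whose memory model keeps
   stores in order; this is a sketch, not the locking scheme glibc itself
   uses.  */
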
#define atomic_and(mem, mask) \
  do { \
    if (sizeof (*mem) == 1) \
      __asm __volatile (LOCK_PREFIX "andb %1, %b0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
    else if (sizeof (*mem) == 2) \
      __asm __volatile (LOCK_PREFIX "andw %1, %w0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
    else if (sizeof (*mem) == 4) \
      __asm __volatile (LOCK_PREFIX "andl %1, %0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
    else \
      __asm __volatile (LOCK_PREFIX "andq %1, %q0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
  } while (0)

#define atomic_or(mem, mask) \
  do { \
    if (sizeof (*mem) == 1) \
      __asm __volatile (LOCK_PREFIX "orb %1, %b0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
    else if (sizeof (*mem) == 2) \
      __asm __volatile (LOCK_PREFIX "orw %1, %w0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
    else if (sizeof (*mem) == 4) \
      __asm __volatile (LOCK_PREFIX "orl %1, %0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
    else \
      __asm __volatile (LOCK_PREFIX "orq %1, %q0" \
                        : "=m" (*mem) \
                        : "ir" (mask), "m" (*mem)); \
  } while (0)
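

/* Usage sketch (an addition): atomic_and and atomic_or are blind
   (no-result) clear/set operations on flag words.  With hypothetical
   flag bits:

     #define FLAG_READY 0x01
     #define FLAG_BUSY  0x02

     static atomic32_t flags;

     atomic_or (&flags, FLAG_READY);
     atomic_and (&flags, ~FLAG_BUSY);

   Use atomic_bit_test_set instead when the previous value matters.  */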