/* glibc source: sysdeps/unix/sysv/linux/i386/lowlevellock.h
   (from the commit "Assume LLL_LOCK_INITIALIZER is 0").  */
/* Copyright (C) 2002-2019 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
18
#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H	1

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <kernel-features.h>
/* <tcb-offsets.h> is generated from tcb-offsets.sym to define offsets
   and sizes of types in <tls.h> as well as <pthread.h> which includes
   <lowlevellock.h> via nptl/descr.h.  Don't include <tcb-offsets.h>
   when generating <tcb-offsets.h> to avoid circular dependency which
   may lead to build hang on a many-core machine.  */
# ifndef GEN_AS_CONST_HEADERS
#  include <tcb-offsets.h>
# endif

/* LOCK_INSTR is prepended to atomic instructions in C inline-assembly
   strings below.  On a uniprocessor build (UP) the bus-lock prefix is
   unnecessary, so it expands to nothing.  */
# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR	/* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
/* Same idea for assembler (.S) sources: LOCK is a bare assembler
   token rather than a C string, empty on UP.  */
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif
52
#include <lowlevellock-futex.h>

/* XXX Remove when no assembler code uses futexes anymore.  */
#define SYS_futex		__NR_futex

#ifndef __ASSEMBLER__

/* Initializer for compatibility lock.  The futex word encodes three
   states: 0 unlocked, 1 locked with no waiters, and 2 locked with
   possibly blocked waiters that must be woken on unlock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)
65
/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word which is guaranteed
   to be nonzero.  */
#if !IS_IN (libc) || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
/* Inside libc on an SMP build: skip the costly lock prefix while the
   process is still single-threaded.  %gs:%P5 reads the TCB's
   multiple_threads flag (operand 5 is MULTIPLE_THREADS_OFFSET).  */
# define __lll_trylock_asm "cmpl $0, %%gs:%P5\n\t" \
			   "je 0f\n\t" \
			   "lock\n" \
			   "0:\tcmpxchgl %2, %1"
#endif

/* Try to acquire FUTEX without blocking: atomically change it from
   0 (unlocked) to 1 (locked).  Evaluates to 0 on success, or the
   nonzero current lock value if the lock was already held.  */
#define lll_trylock(futex) \
  ({ int ret;								      \
     __asm __volatile (__lll_trylock_asm				      \
		       : "=a" (ret), "=m" (futex)			      \
		       : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex),      \
			 "0" (LLL_LOCK_INITIALIZER),			      \
			 "i" (MULTIPLE_THREADS_OFFSET)			      \
		       : "memory");					      \
     ret; })
89
90
/* Like lll_trylock, but on success set FUTEX to 2 (locked, possible
   waiters) so that the eventual unlock always performs a wake-up.
   Evaluates to 0 on success, nonzero otherwise.  */
#define lll_cond_trylock(futex) \
  ({ int ret;								      \
     __asm __volatile (LOCK_INSTR "cmpxchgl %2, %1"			      \
		       : "=a" (ret), "=m" (futex)			      \
		       : "r" (LLL_LOCK_INITIALIZER_WAITERS),		      \
			 "m" (futex), "0" (LLL_LOCK_INITIALIZER)	      \
		       : "memory");					      \
     ret; })
99
#if !IS_IN (libc) || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %1, %2\n\t"
#else
/* Inside libc on SMP: elide the lock prefix while single-threaded;
   %gs:%P6 reads the TCB's multiple_threads flag (operand 6 is
   MULTIPLE_THREADS_OFFSET).  */
# define __lll_lock_asm_start "cmpl $0, %%gs:%P6\n\t" \
			      "je 0f\n\t" \
			      "lock\n" \
			      "0:\tcmpxchgl %1, %2\n\t"
#endif

/* Acquire FUTEX.  Fast path: atomic cmpxchg of 0 -> 1; when that
   fails the lock is contended and the out-of-line wait routine is
   called with &futex in a register.  PRIVATE selects process-private
   vs.  process-shared futex semantics; when it is the compile-time
   constant LLL_PRIVATE, __lll_lock_wait_private is called directly
   and no PRIVATE argument needs to be materialized in %ecx.  */
#define lll_lock(futex, private) \
  (void)								      \
    ({ int ignore1, ignore2;						      \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE)	      \
	 __asm __volatile (__lll_lock_asm_start				      \
			   "jz 18f\n\t"					      \
			   "1:\tleal %2, %%ecx\n"			      \
			   "2:\tcall __lll_lock_wait_private\n"		      \
			   "18:"					      \
			   : "=a" (ignore1), "=c" (ignore2), "=m" (futex)     \
			   : "0" (0), "1" (1), "m" (futex),		      \
			     "i" (MULTIPLE_THREADS_OFFSET)		      \
			   : "memory");					      \
       else								      \
	 {								      \
	   int ignore3;							      \
	   __asm __volatile (__lll_lock_asm_start			      \
			     "jz 18f\n\t"				      \
			     "1:\tleal %2, %%edx\n"			      \
			     "0:\tmovl %8, %%ecx\n"			      \
			     "2:\tcall __lll_lock_wait\n"		      \
			     "18:"					      \
			     : "=a" (ignore1), "=c" (ignore2),		      \
			       "=m" (futex), "=&d" (ignore3)		      \
			     : "1" (1), "m" (futex),			      \
			       "i" (MULTIPLE_THREADS_OFFSET), "0" (0),	      \
			       "g" ((int) (private))			      \
			     : "memory");				      \
	 }								      \
    })
139
140
/* Special version of lll_lock which causes the unlock function to
   always wakeup waiters.  The fast path tries cmpxchg 0 -> 2 directly
   (note the "1" (2) input), so the lock word never indicates the
   uncontended state 1; on failure __lll_lock_wait is called with
   &futex in %edx and PRIVATE in %ecx.  */
#define lll_cond_lock(futex, private) \
  (void)								      \
    ({ int ignore1, ignore2, ignore3;					      \
       __asm __volatile (LOCK_INSTR "cmpxchgl %1, %2\n\t"		      \
			 "jz 18f\n\t"					      \
			 "1:\tleal %2, %%edx\n"				      \
			 "0:\tmovl %7, %%ecx\n"				      \
			 "2:\tcall __lll_lock_wait\n"			      \
			 "18:"						      \
			 : "=a" (ignore1), "=c" (ignore2), "=m" (futex),      \
			   "=&d" (ignore3)				      \
			 : "0" (0), "1" (2), "m" (futex), "g" ((int) (private))\
			 : "memory");					      \
    })
157
158
/* Acquire FUTEX like lll_lock, but give up once the TIMEOUT has
   elapsed.  Evaluates to 0 on success, or a nonzero error code
   (e.g. ETIMEDOUT) returned by __lll_timedlock_wait on failure.
   &futex goes in %ecx, &timeout is referenced via a memory operand,
   and PRIVATE is pinned to %esi (constraint "4"/"=S").  */
#define lll_timedlock(futex, timeout, private) \
  ({ int result, ignore1, ignore2, ignore3;				      \
     __asm __volatile (LOCK_INSTR "cmpxchgl %1, %3\n\t"			      \
		       "jz 18f\n\t"					      \
		       "1:\tleal %3, %%ecx\n"				      \
		       "0:\tmovl %8, %%edx\n"				      \
		       "2:\tcall __lll_timedlock_wait\n"		      \
		       "18:"						      \
		       : "=a" (result), "=c" (ignore1), "=&d" (ignore2),      \
			 "=m" (futex), "=S" (ignore3)			      \
		       : "0" (0), "1" (1), "m" (futex), "m" (timeout),	      \
			 "4" ((int) (private))				      \
		       : "memory");					      \
     result; })

/* Timed-lock entry point of the lock-elision (transactional) variant,
   implemented in C elsewhere; ADAPT_COUNT throttles retries of
   transactional execution after aborts.  */
extern int __lll_timedlock_elision (int *futex, short *adapt_count,
				    const struct timespec *timeout,
				    int private) attribute_hidden;

#define lll_timedlock_elision(futex, adapt_count, timeout, private) \
  __lll_timedlock_elision(&(futex), &(adapt_count), timeout, private)
180
#if !IS_IN (libc) || defined UP
# define __lll_unlock_asm LOCK_INSTR "subl $1, %0\n\t"
#else
/* Inside libc on SMP: skip the lock prefix while single-threaded;
   %gs:%P3 reads the TCB's multiple_threads flag (operand 3 is
   MULTIPLE_THREADS_OFFSET).  */
# define __lll_unlock_asm "cmpl $0, %%gs:%P3\n\t" \
			  "je 0f\n\t" \
			  "lock\n" \
			  "0:\tsubl $1,%0\n\t"
#endif

/* Release FUTEX: atomically decrement it.  If the old value was 1
   (no waiters) the result is 0 and we are done; otherwise it was 2,
   and the out-of-line wake routine is called (&futex in %eax) to
   reset the word and wake a waiter.  PRIVATE selects private vs.
   shared futex semantics, with the same compile-time LLL_PRIVATE
   shortcut as lll_lock.  */
#define lll_unlock(futex, private) \
  (void)								      \
    ({ int ignore;							      \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE)	      \
	 __asm __volatile (__lll_unlock_asm				      \
			   "je 18f\n\t"					      \
			   "1:\tleal %0, %%eax\n"			      \
			   "2:\tcall __lll_unlock_wake_private\n"	      \
			   "18:"					      \
			   : "=m" (futex), "=&a" (ignore)		      \
			   : "m" (futex), "i" (MULTIPLE_THREADS_OFFSET)	      \
			   : "memory");					      \
       else								      \
	 {								      \
	   int ignore2;							      \
	   __asm __volatile (__lll_unlock_asm				      \
			     "je 18f\n\t"				      \
			     "1:\tleal %0, %%eax\n"			      \
			     "0:\tmovl %5, %%ecx\n"			      \
			     "2:\tcall __lll_unlock_wake\n"		      \
			     "18:"					      \
			     : "=m" (futex), "=&a" (ignore), "=&c" (ignore2)  \
			     : "i" (MULTIPLE_THREADS_OFFSET), "m" (futex),    \
			       "g" ((int) (private))			      \
			     : "memory");				      \
	 }								      \
    })
217
218
/* Nonzero if FUTEX currently holds any value other than the unlocked
   state (LLL_LOCK_INITIALIZER, i.e. 0).  */
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)

/* Lock-elision (transactional) entry points, implemented in C
   elsewhere; ADAPT_COUNT throttles how often transactional execution
   is retried after aborts.  */
extern int __lll_lock_elision (int *futex, short *adapt_count, int private)
  attribute_hidden;

extern int __lll_unlock_elision(int *lock, int private)
  attribute_hidden;

extern int __lll_trylock_elision(int *lock, short *adapt_count)
  attribute_hidden;

/* NB: lll_unlock_elision ignores ADAPT_COUNT; it is accepted only to
   keep the macro signature parallel with lll_lock_elision.  */
#define lll_lock_elision(futex, adapt_count, private) \
  __lll_lock_elision (&(futex), &(adapt_count), private)
#define lll_unlock_elision(futex, adapt_count, private) \
  __lll_unlock_elision (&(futex), private)
#define lll_trylock_elision(futex, adapt_count) \
  __lll_trylock_elision(&(futex), &(adapt_count))

#endif	/* !__ASSEMBLER__ */

#endif	/* lowlevellock.h */