/* Copyright (C) 2002-2019 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <kernel-features.h>

# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR /* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif

#include <lowlevellock-futex.h>

/* XXX Remove when no assembler code uses futexes anymore.  */
#define SYS_futex __NR_futex

#ifndef __ASSEMBLER__

/* Initializer for lock.  */
#define LLL_LOCK_INITIALIZER (0)
#define LLL_LOCK_INITIALIZER_LOCKED (1)
#define LLL_LOCK_INITIALIZER_WAITERS (2)


/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word which is guaranteed
   to be nonzero.  */
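/* NB: inside libc on SMP builds the "lock" prefix is elided while the
   process is still single-threaded: the asm below first tests
   __libc_multiple_threads and branches to a non-atomic cmpxchg when it
   is zero, since no other thread can race on the futex word yet.  */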
#if !IS_IN (libc) || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
# define __lll_trylock_asm "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
                           "je 0f\n\t" \
                           "lock; cmpxchgl %2, %1\n\t" \
                           "jmp 1f\n\t" \
                           "0:\tcmpxchgl %2, %1\n\t" \
                           "1:"
#endif

#define lll_trylock(futex) \
  ({ int ret; \
     __asm __volatile (__lll_trylock_asm \
                       : "=a" (ret), "=m" (futex) \
                       : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex), \
                         "0" (LLL_LOCK_INITIALIZER) \
                       : "memory"); \
     ret; })

#define lll_cond_trylock(futex) \
  ({ int ret; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %2, %1" \
                       : "=a" (ret), "=m" (futex) \
                       : "r" (LLL_LOCK_INITIALIZER_WAITERS), \
                         "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
                       : "memory"); \
     ret; })

#if !IS_IN (libc) || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %4, %2\n\t" \
                              "jz 24f\n\t"
#else
# define __lll_lock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
                              "je 0f\n\t" \
                              "lock; cmpxchgl %4, %2\n\t" \
                              "jnz 1f\n\t" \
                              "jmp 24f\n" \
                              "0:\tcmpxchgl %4, %2\n\t" \
                              "jz 24f\n\t"
#endif

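/* NB: the slow paths below move %rsp down by 128 bytes before calling
   __lll_lock_wait / __lll_lock_wait_private and restore it afterwards
   so the callee cannot clobber the caller's red zone; the
   .cfi_adjust_cfa_offset notes keep the unwind information consistent
   across that adjustment.  */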
#define lll_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
         __asm __volatile (__lll_lock_asm_start \
                           "1:\tlea %2, %%" RDI_LP "\n" \
                           "2:\tsub $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset 128\n" \
                           "3:\tcallq __lll_lock_wait_private\n" \
                           "4:\tadd $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset -128\n" \
                           "24:" \
                           : "=S" (ignore1), "=&D" (ignore2), "=m" (futex), \
                             "=a" (ignore3) \
                           : "0" (1), "m" (futex), "3" (0) \
                           : "cx", "r11", "cc", "memory"); \
       else \
         __asm __volatile (__lll_lock_asm_start \
                           "1:\tlea %2, %%" RDI_LP "\n" \
                           "2:\tsub $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset 128\n" \
                           "3:\tcallq __lll_lock_wait\n" \
                           "4:\tadd $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset -128\n" \
                           "24:" \
                           : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
                             "=a" (ignore3) \
                           : "1" (1), "m" (futex), "3" (0), "0" (private) \
                           : "cx", "r11", "cc", "memory"); \
    })

#define lll_cond_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       __asm __volatile (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
                         "jz 24f\n" \
                         "1:\tlea %2, %%" RDI_LP "\n" \
                         "2:\tsub $128, %%" RSP_LP "\n" \
                         ".cfi_adjust_cfa_offset 128\n" \
                         "3:\tcallq __lll_lock_wait\n" \
                         "4:\tadd $128, %%" RSP_LP "\n" \
                         ".cfi_adjust_cfa_offset -128\n" \
                         "24:" \
                         : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
                           "=a" (ignore3) \
                         : "1" (2), "m" (futex), "3" (0), "0" (private) \
                         : "cx", "r11", "cc", "memory"); \
    })

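/* NB: lll_timedlock evaluates to 0 when the cmpxchg acquires the lock
   directly; otherwise the result is whatever error code (for instance
   ETIMEDOUT) __lll_timedlock_wait returns in %eax.  */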
#define lll_timedlock(futex, timeout, private) \
  ({ int result, ignore1, ignore2, ignore3; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %1, %4\n\t" \
                       "jz 24f\n" \
                       "1:\tlea %4, %%" RDI_LP "\n" \
                       "0:\tmov %8, %%" RDX_LP "\n" \
                       "2:\tsub $128, %%" RSP_LP "\n" \
                       ".cfi_adjust_cfa_offset 128\n" \
                       "3:\tcallq __lll_timedlock_wait\n" \
                       "4:\tadd $128, %%" RSP_LP "\n" \
                       ".cfi_adjust_cfa_offset -128\n" \
                       "24:" \
                       : "=a" (result), "=D" (ignore1), "=S" (ignore2), \
                         "=&d" (ignore3), "=m" (futex) \
                       : "0" (0), "1" (1), "m" (futex), "m" (timeout), \
                         "2" (private) \
                       : "memory", "cx", "cc", "r10", "r11"); \
     result; })

extern int __lll_timedlock_elision (int *futex, short *adapt_count,
                                    const struct timespec *timeout,
                                    int private) attribute_hidden;

#define lll_timedlock_elision(futex, adapt_count, timeout, private) \
  __lll_timedlock_elision(&(futex), &(adapt_count), timeout, private)

#if !IS_IN (libc) || defined UP
# define __lll_unlock_asm_start LOCK_INSTR "decl %0\n\t" \
                                "je 24f\n\t"
#else
# define __lll_unlock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
                                "je 0f\n\t" \
                                "lock; decl %0\n\t" \
                                "jne 1f\n\t" \
                                "jmp 24f\n\t" \
                                "0:\tdecl %0\n\t" \
                                "je 24f\n\t"
#endif

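/* NB: unlocking decrements the futex word; if the old value was 1 the
   decrement yields zero and control jumps straight to label 24,
   otherwise there are waiters and __lll_unlock_wake (or the _private
   variant) is called to wake one of them.  */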
#define lll_unlock(futex, private) \
  (void) \
    ({ int ignore; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
         __asm __volatile (__lll_unlock_asm_start \
                           "1:\tlea %0, %%" RDI_LP "\n" \
                           "2:\tsub $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset 128\n" \
                           "3:\tcallq __lll_unlock_wake_private\n" \
                           "4:\tadd $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset -128\n" \
                           "24:" \
                           : "=m" (futex), "=&D" (ignore) \
                           : "m" (futex) \
                           : "ax", "cx", "r11", "cc", "memory"); \
       else \
         __asm __volatile (__lll_unlock_asm_start \
                           "1:\tlea %0, %%" RDI_LP "\n" \
                           "2:\tsub $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset 128\n" \
                           "3:\tcallq __lll_unlock_wake\n" \
                           "4:\tadd $128, %%" RSP_LP "\n" \
                           ".cfi_adjust_cfa_offset -128\n" \
                           "24:" \
                           : "=m" (futex), "=&D" (ignore) \
                           : "m" (futex), "S" (private) \
                           : "ax", "cx", "r11", "cc", "memory"); \
    })
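
/* Illustrative sketch only (not part of the glibc API): how the
   low-level lock macros above fit together.  The names example_futex
   and example_critical_section are hypothetical and nothing here is
   compiled; it merely shows the intended call pattern for a futex word
   initialized to LLL_LOCK_INITIALIZER.  */
#if 0
static int example_futex = LLL_LOCK_INITIALIZER;

static void
example_critical_section (void)
{
  /* Try the fast, non-blocking path first; lll_trylock returns zero on
     success and nonzero if the lock is already held.  */
  if (lll_trylock (example_futex) != 0)
    /* Contended: block until the lock becomes available.  */
    lll_lock (example_futex, LLL_PRIVATE);

  /* ... code protected by the lock ...  */

  /* Release the lock and wake one waiter if there is any.  */
  lll_unlock (example_futex, LLL_PRIVATE);
}
#endif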

#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)

extern int __lll_lock_elision (int *futex, short *adapt_count, int private)
  attribute_hidden;

extern int __lll_unlock_elision (int *lock, int private)
  attribute_hidden;

extern int __lll_trylock_elision (int *lock, short *adapt_count)
  attribute_hidden;

#define lll_lock_elision(futex, adapt_count, private) \
  __lll_lock_elision (&(futex), &(adapt_count), private)
#define lll_unlock_elision(futex, adapt_count, private) \
  __lll_unlock_elision (&(futex), private)
#define lll_trylock_elision(futex, adapt_count) \
  __lll_trylock_elision (&(futex), &(adapt_count))

#endif  /* !__ASSEMBLER__ */

#endif  /* lowlevellock.h */