/* Definition for thread-local data handling.  nptl/x86_64 version.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _TLS_H
#define _TLS_H 1

#ifndef __ASSEMBLER__
# include <asm/prctl.h>   /* For ARCH_SET_FS.  */
# include <stdbool.h>
# include <stddef.h>
# include <stdint.h>
# include <stdlib.h>
# include <sysdep.h>
# include <libc-internal.h>
# include <kernel-features.h>

/* Replacement type for __m128 since this file is included by ld.so,
   which is compiled with -mno-sse.  It must not change the alignment
   of rtld_savespace_sse.  */
typedef struct
{
  int i[4];
} __128bits;


/* Type for the dtv.  */
typedef union dtv
{
  size_t counter;
  struct
  {
    void *val;
    bool is_static;
  } pointer;
} dtv_t;
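
/* Editorial note, not part of the original header: in glibc's dtv layout the
   element at index -1 holds the number of allocated entries, index 0 holds
   the generation counter, and indexes 1 and up hold the per-module TLS
   blocks.  A rough reading sketch (tls_modid is a hypothetical module id):

     dtv_t *dtv = THREAD_DTV ();
     size_t nentries   = dtv[-1].counter;
     size_t generation = dtv[0].counter;
     void *block       = dtv[tls_modid].pointer.val;
*/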


typedef struct
{
  void *tcb;            /* Pointer to the TCB.  Not necessarily the
                           thread descriptor used by libpthread.  */
  dtv_t *dtv;
  void *self;           /* Pointer to the thread descriptor.  */
  int multiple_threads;
  int gscope_flag;
  uintptr_t sysinfo;
  uintptr_t stack_guard;
  uintptr_t pointer_guard;
  unsigned long int vgetcpu_cache[2];
# ifndef __ASSUME_PRIVATE_FUTEX
  int private_futex;
# else
  int __unused1;
# endif
  int rtld_must_xmm_save;
  /* Reservation of some values for the TM ABI.  */
  void *__private_tm[5];
  long int __unused2;
  /* Have space for the post-AVX register size.  */
  __128bits rtld_savespace_sse[8][4] __attribute__ ((aligned (32)));

  void *__padding[8];
} tcbhead_t;
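
/* Editorial note, not part of the original header: the %fs-relative asm
   below assumes the first members of tcbhead_t sit at fixed offsets from
   the thread pointer (the %fs base).  A sketch of the assumed invariants:

     _Static_assert (offsetof (tcbhead_t, tcb)  == 0,  "tcb at %fs:0");
     _Static_assert (offsetof (tcbhead_t, dtv)  == 8,  "dtv at %fs:8");
     _Static_assert (offsetof (tcbhead_t, self) == 16, "self at %fs:16");
*/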


#else /* __ASSEMBLER__ */
# include <tcb-offsets.h>
#endif


/* Alignment requirement for the stack.  */
#define STACK_ALIGN 16


#ifndef __ASSEMBLER__
/* Get system call information.  */
# include <sysdep.h>


/* Get the thread descriptor definition.  */
# include <nptl/descr.h>

#ifndef LOCK_PREFIX
# ifdef UP
# define LOCK_PREFIX  /* nothing */
# else
# define LOCK_PREFIX  "lock;"
# endif
#endif

/* This is the size of the initial TCB.  Can't be just sizeof (tcbhead_t),
   because NPTL getpid, __libc_alloca_cutoff etc. need (almost) the whole
   struct pthread even when not linked with -lpthread.  */
# define TLS_INIT_TCB_SIZE sizeof (struct pthread)

/* Alignment requirements for the initial TCB.  */
# define TLS_INIT_TCB_ALIGN __alignof__ (struct pthread)

/* This is the size of the TCB.  */
# define TLS_TCB_SIZE sizeof (struct pthread)

/* Alignment requirements for the TCB.  */
# define TLS_TCB_ALIGN __alignof__ (struct pthread)

/* The TCB can have any size and the memory following the address the
   thread pointer points to is unspecified.  Allocate the TCB there.  */
# define TLS_TCB_AT_TP 1


/* Install the dtv pointer.  The pointer passed is to the element with
   index -1 which contains the length.  */
# define INSTALL_DTV(descr, dtvp) \
  ((tcbhead_t *) (descr))->dtv = (dtvp) + 1

/* Install new dtv for current thread.  */
# define INSTALL_NEW_DTV(dtvp) \
  ({ struct pthread *__pd; \
     THREAD_SETMEM (__pd, header.dtv, (dtvp)); })

/* Return dtv of given thread descriptor.  */
# define GET_DTV(descr) \
  (((tcbhead_t *) (descr))->dtv)
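
/* Editorial sketch, not part of the original header: INSTALL_DTV is handed a
   pointer to the length element (index -1), so storing (dtvp) + 1 makes the
   saved pointer follow the indexing convention noted above:

     dtv_t *dtv = GET_DTV (descr);
     size_t nentries = dtv[-1].counter;
*/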


/* Code to initially initialize the thread pointer.  This might need
   special attention since 'errno' is not yet available and if the
   operation can cause a failure 'errno' must not be touched.

   We have to make the syscall for both uses of the macro since the
   address might be (and probably is) different.  */
# define TLS_INIT_TP(thrdescr, secondcall) \
  ({ void *_thrdescr = (thrdescr); \
     tcbhead_t *_head = _thrdescr; \
     int _result; \
     \
     _head->tcb = _thrdescr; \
     /* For now the thread descriptor is at the same address.  */ \
     _head->self = _thrdescr; \
     \
     /* It is a simple syscall to set the %fs value for the thread.  */ \
     asm volatile ("syscall" \
                   : "=a" (_result) \
                   : "0" ((unsigned long int) __NR_arch_prctl), \
                     "D" ((unsigned long int) ARCH_SET_FS), \
                     "S" (_thrdescr) \
                   : "memory", "cc", "r11", "cx"); \
     \
     _result ? "cannot set %fs base address for thread-local storage" : 0; \
  })
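
/* Editorial sketch, not part of the original header: the open-coded
   "syscall" above does, in effect,

     arch_prctl (ARCH_SET_FS, (unsigned long int) _thrdescr);

   It is written as raw inline asm because errno must not be touched this
   early, and the clobber list reflects that the kernel trashes %rcx and
   %r11 on the syscall instruction.  */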


/* Return the address of the dtv for the current thread.  */
# define THREAD_DTV() \
  ({ struct pthread *__pd; \
     THREAD_GETMEM (__pd, header.dtv); })


/* Return the thread descriptor for the current thread.

   The contained asm must *not* be marked volatile since otherwise
   assignments like
        pthread_descr self = thread_self();
   do not get optimized away.  */
# define THREAD_SELF \
  ({ struct pthread *__self; \
     asm ("mov %%fs:%c1,%0" : "=r" (__self) \
          : "i" (offsetof (struct pthread, header.self))); \
     __self; })
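
/* Editorial sketch, not part of the original header: a typical use is

     struct pthread *self = THREAD_SELF;

   which compiles to a single %fs-relative load of header.self, i.e. the
   descriptor pointer stored by TLS_INIT_TP above (or by the thread setup
   code for threads created later).  */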

/* Magic for libthread_db to know how to do THREAD_SELF.  */
# define DB_THREAD_SELF_INCLUDE <sys/reg.h> /* For the FS constant.  */
# define DB_THREAD_SELF CONST_THREAD_AREA (64, FS)

/* Read member of the thread descriptor directly.  */
# define THREAD_GETMEM(descr, member) \
  ({ __typeof (descr->member) __value; \
     if (sizeof (__value) == 1) \
       asm volatile ("movb %%fs:%P2,%b0" \
                     : "=q" (__value) \
                     : "0" (0), "i" (offsetof (struct pthread, member))); \
     else if (sizeof (__value) == 4) \
       asm volatile ("movl %%fs:%P1,%0" \
                     : "=r" (__value) \
                     : "i" (offsetof (struct pthread, member))); \
     else \
       { \
         if (sizeof (__value) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
         \
         asm volatile ("movq %%fs:%P1,%q0" \
                       : "=r" (__value) \
                       : "i" (offsetof (struct pthread, member))); \
       } \
     __value; })
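
/* Editorial sketch, not part of the original header: typical usage is

     int mt = THREAD_GETMEM (THREAD_SELF, header.multiple_threads);

   Note that descr is only consulted for the member's type and size; the
   load is always %fs-relative, so it reads the current thread's descriptor
   no matter which descriptor is passed.  */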


/* Same as THREAD_GETMEM, but the member offset can be non-constant.  */
# define THREAD_GETMEM_NC(descr, member, idx) \
  ({ __typeof (descr->member[0]) __value; \
     if (sizeof (__value) == 1) \
       asm volatile ("movb %%fs:%P2(%q3),%b0" \
                     : "=q" (__value) \
                     : "0" (0), "i" (offsetof (struct pthread, member[0])), \
                       "r" (idx)); \
     else if (sizeof (__value) == 4) \
       asm volatile ("movl %%fs:%P1(,%q2,4),%0" \
                     : "=r" (__value) \
                     : "i" (offsetof (struct pthread, member[0])), "r" (idx)); \
     else \
       { \
         if (sizeof (__value) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
         \
         asm volatile ("movq %%fs:%P1(,%q2,8),%q0" \
                       : "=r" (__value) \
                       : "i" (offsetof (struct pthread, member[0])), \
                         "r" (idx)); \
       } \
     __value; })


/* Loading addresses of objects on x86-64 needs to be treated specially
   when generating PIC code.  */
#ifdef __pic__
# define IMM_MODE "nr"
#else
# define IMM_MODE "ir"
#endif


/* Set member of the thread descriptor directly.  */
# define THREAD_SETMEM(descr, member, value) \
  ({ if (sizeof (descr->member) == 1) \
       asm volatile ("movb %b0,%%fs:%P1" : \
                     : "iq" (value), \
                       "i" (offsetof (struct pthread, member))); \
     else if (sizeof (descr->member) == 4) \
       asm volatile ("movl %0,%%fs:%P1" : \
                     : IMM_MODE (value), \
                       "i" (offsetof (struct pthread, member))); \
     else \
       { \
         if (sizeof (descr->member) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
         \
         asm volatile ("movq %q0,%%fs:%P1" : \
                       : IMM_MODE ((uint64_t) cast_to_integer (value)), \
                         "i" (offsetof (struct pthread, member))); \
       }})
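
/* Editorial sketch, not part of the original header: typical usage is

     THREAD_SETMEM (THREAD_SELF, header.multiple_threads, 1);

   As with THREAD_GETMEM, descr only determines the member's size; the
   store always goes through %fs to the current thread's descriptor.  */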


/* Same as THREAD_SETMEM, but the member offset can be non-constant.  */
# define THREAD_SETMEM_NC(descr, member, idx, value) \
  ({ if (sizeof (descr->member[0]) == 1) \
       asm volatile ("movb %b0,%%fs:%P1(%q2)" : \
                     : "iq" (value), \
                       "i" (offsetof (struct pthread, member[0])), \
                       "r" (idx)); \
     else if (sizeof (descr->member[0]) == 4) \
       asm volatile ("movl %0,%%fs:%P1(,%q2,4)" : \
                     : IMM_MODE (value), \
                       "i" (offsetof (struct pthread, member[0])), \
                       "r" (idx)); \
     else \
       { \
         if (sizeof (descr->member[0]) != 8) \
           /* There should not be any value with a size other than 1, \
              4 or 8.  */ \
           abort (); \
         \
         asm volatile ("movq %q0,%%fs:%P1(,%q2,8)" : \
                       : IMM_MODE ((uint64_t) cast_to_integer (value)), \
                         "i" (offsetof (struct pthread, member[0])), \
                         "r" (idx)); \
       }})


/* Atomic compare and exchange on TLS, returning old value.  */
# define THREAD_ATOMIC_CMPXCHG_VAL(descr, member, newval, oldval) \
  ({ __typeof (descr->member) __ret; \
     __typeof (oldval) __old = (oldval); \
     if (sizeof (descr->member) == 4) \
       asm volatile (LOCK_PREFIX "cmpxchgl %2, %%fs:%P3" \
                     : "=a" (__ret) \
                     : "0" (__old), "r" (newval), \
                       "i" (offsetof (struct pthread, member))); \
     else \
       /* Not necessary for other sizes at the moment.  */ \
       abort (); \
     __ret; })


/* Atomic logical and.  */
# define THREAD_ATOMIC_AND(descr, member, val) \
  (void) ({ if (sizeof ((descr)->member) == 4) \
              asm volatile (LOCK_PREFIX "andl %1, %%fs:%P0" \
                            :: "i" (offsetof (struct pthread, member)), \
                               "ir" (val)); \
            else \
              /* Not necessary for other sizes at the moment.  */ \
              abort (); })


/* Atomic set bit.  */
# define THREAD_ATOMIC_BIT_SET(descr, member, bit) \
  (void) ({ if (sizeof ((descr)->member) == 4) \
              asm volatile (LOCK_PREFIX "orl %1, %%fs:%P0" \
                            :: "i" (offsetof (struct pthread, member)), \
                               "ir" (1 << (bit))); \
            else \
              /* Not necessary for other sizes at the moment.  */ \
              abort (); })


# define CALL_THREAD_FCT(descr) \
  ({ void *__res; \
     asm volatile ("movq %%fs:%P2, %%rdi\n\t" \
                   "callq *%%fs:%P1" \
                   : "=a" (__res) \
                   : "i" (offsetof (struct pthread, start_routine)), \
                     "i" (offsetof (struct pthread, arg)) \
                   : "di", "si", "cx", "dx", "r8", "r9", "r10", "r11", \
                     "memory", "cc"); \
     __res; })
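
/* Editorial sketch, not part of the original header: this is used when a
   newly created thread starts running and is roughly equivalent to

     void *res = THREAD_SELF->start_routine (THREAD_SELF->arg);

   except that both fields are read directly through %fs.  */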


/* Set the stack guard field in TCB head.  */
# define THREAD_SET_STACK_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.stack_guard, value)
# define THREAD_COPY_STACK_GUARD(descr) \
  ((descr)->header.stack_guard \
   = THREAD_GETMEM (THREAD_SELF, header.stack_guard))


/* Set the pointer guard field in the TCB head.  */
# define THREAD_SET_POINTER_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.pointer_guard, value)
# define THREAD_COPY_POINTER_GUARD(descr) \
  ((descr)->header.pointer_guard \
   = THREAD_GETMEM (THREAD_SELF, header.pointer_guard))


/* Get and set the global scope generation counter in the TCB head.  */
# define THREAD_GSCOPE_FLAG_UNUSED 0
# define THREAD_GSCOPE_FLAG_USED   1
# define THREAD_GSCOPE_FLAG_WAIT   2
# define THREAD_GSCOPE_RESET_FLAG() \
  do \
    { int __res; \
      asm volatile ("xchgl %0, %%fs:%P1" \
                    : "=r" (__res) \
                    : "i" (offsetof (struct pthread, header.gscope_flag)), \
                      "0" (THREAD_GSCOPE_FLAG_UNUSED)); \
      if (__res == THREAD_GSCOPE_FLAG_WAIT) \
        lll_futex_wake (&THREAD_SELF->header.gscope_flag, 1, LLL_PRIVATE); \
    } \
  while (0)
# define THREAD_GSCOPE_SET_FLAG() \
  THREAD_SETMEM (THREAD_SELF, header.gscope_flag, THREAD_GSCOPE_FLAG_USED)
# define THREAD_GSCOPE_WAIT() \
  GL(dl_wait_lookup_done) ()
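
/* Editorial sketch, not part of the original header: the dynamic linker
   roughly brackets each lookup that uses the global scope with

     THREAD_GSCOPE_SET_FLAG ();
     ... symbol lookup ...
     THREAD_GSCOPE_RESET_FLAG ();

   while code that changes the global scope (e.g. dlclose) uses
   THREAD_GSCOPE_WAIT to wait until no thread still has its flag USED.  */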


# ifdef SHARED
/* Defined in dl-trampoline.S.  */
extern void _dl_x86_64_save_sse (void);
extern void _dl_x86_64_restore_sse (void);

# define RTLD_CHECK_FOREIGN_CALL \
  (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) != 0)

/* NB: Don't use the xchg operation because that would imply a lock
   prefix which is expensive and unnecessary.  The cache line is also
   not contested at all.  */
# define RTLD_ENABLE_FOREIGN_CALL \
  int old_rtld_must_xmm_save = THREAD_GETMEM (THREAD_SELF, \
                                              header.rtld_must_xmm_save); \
  THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 1)

# define RTLD_PREPARE_FOREIGN_CALL \
  do if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save)) \
    { \
      _dl_x86_64_save_sse (); \
      THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 0); \
    } \
  while (0)

# define RTLD_FINALIZE_FOREIGN_CALL \
  do { \
    if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) == 0) \
      _dl_x86_64_restore_sse (); \
    THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, \
                   old_rtld_must_xmm_save); \
  } while (0)
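
/* Editorial sketch, not part of the original header: ld.so pairs these
   macros roughly as

     RTLD_ENABLE_FOREIGN_CALL;
     ...
     RTLD_PREPARE_FOREIGN_CALL;   (just before calling foreign/audit code)
     ...
     RTLD_FINALIZE_FOREIGN_CALL;

   so that live SSE/AVX argument registers are saved into rtld_savespace_sse
   before foreign code can clobber them and are restored afterwards.  */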
# endif


#endif /* __ASSEMBLER__ */

#endif /* tls.h */