/* If we've already used the variable with dynamic access, or if the
alignment requirements are too high, fail. */
if (map->l_tls_offset == FORCED_DYNAMIC_TLS_OFFSET
- || map->l_tls_align > GL(dl_tls_static_align))
+ || map->l_tls_align > GLRO (dl_tls_static_align))
{
fail:
return -1;
}
#if TLS_TCB_AT_TP
- size_t freebytes = GL(dl_tls_static_size) - GL(dl_tls_static_used);
+ size_t freebytes = GLRO (dl_tls_static_size) - GL(dl_tls_static_used);
if (freebytes < TLS_TCB_SIZE)
goto fail;
freebytes -= TLS_TCB_SIZE;
+ map->l_tls_firstbyte_offset);
size_t used = offset + map->l_tls_blocksize;
- if (used > GL(dl_tls_static_size))
+ if (used > GLRO (dl_tls_static_size))
goto fail;
/* Account optional static TLS surplus usage. */
}
GL(dl_tls_static_used) = offset;
- GL(dl_tls_static_size) = (roundup (offset + GLRO(dl_tls_static_surplus),
- max_align)
- + TLS_TCB_SIZE);
+ GLRO (dl_tls_static_size) = (roundup (offset + GLRO(dl_tls_static_surplus),
+ max_align)
+ + TLS_TCB_SIZE);
#elif TLS_DTV_AT_TP
/* The TLS blocks start right after the TCB. */
size_t offset = TLS_TCB_SIZE;
}
GL(dl_tls_static_used) = offset;
- GL(dl_tls_static_size) = roundup (offset + GLRO(dl_tls_static_surplus),
- TLS_TCB_ALIGN);
+ GLRO (dl_tls_static_size) = roundup (offset + GLRO(dl_tls_static_surplus),
+ TLS_TCB_ALIGN);
#else
# error "Either TLS_TCB_AT_TP or TLS_DTV_AT_TP must be defined"
#endif
/* The alignment requirement for the static TLS block. */
- GL(dl_tls_static_align) = max_align;
+ GLRO (dl_tls_static_align) = max_align;
}
#endif /* SHARED */
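For reference, the two assignments above implement the same arithmetic: round the space consumed by the initially loaded modules plus the preallocated surplus up to the strictest alignment, and (for TLS_TCB_AT_TP) append the TCB. A minimal standalone sketch of that computation, using the roundup macro from <sys/param.h> and purely hypothetical numbers in place of the real offset, surplus, alignment and TCB size:

#include <stdio.h>
#include <sys/param.h>   /* roundup */

int
main (void)
{
  /* Hypothetical inputs; the real values come from the link maps,
     GLRO(dl_tls_static_surplus), TLS_TCB_SIZE and TLS_TCB_ALIGN.  */
  size_t offset = 600;      /* bytes assigned to the initial TLS blocks */
  size_t surplus = 1664;    /* reserve for later dlopen'ed modules */
  size_t max_align = 64;    /* strictest TLS alignment requirement */
  size_t tcb_size = 704;    /* stand-in for TLS_TCB_SIZE */
  size_t tcb_align = 64;    /* stand-in for TLS_TCB_ALIGN */

  /* TLS_TCB_AT_TP: the TCB sits above the TLS blocks.  */
  size_t size_tcb_at_tp = roundup (offset + surplus, max_align) + tcb_size;

  /* TLS_DTV_AT_TP: the offset already starts past the TCB.  */
  size_t size_dtv_at_tp = roundup (offset + surplus, tcb_align);

  printf ("TCB_AT_TP: %zu bytes, DTV_AT_TP: %zu bytes\n",
          size_tcb_at_tp, size_dtv_at_tp);
  return 0;
}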
void
_dl_get_tls_static_info (size_t *sizep, size_t *alignp)
{
- *sizep = GL(dl_tls_static_size);
- *alignp = GL(dl_tls_static_align);
+ *sizep = GLRO (dl_tls_static_size);
+ *alignp = GLRO (dl_tls_static_align);
}
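_dl_get_tls_static_info just hands the two GLRO values to the caller, for example code that has to size an allocation which must also hold the static TLS block. A hedged sketch of such a caller (the wrapper function is hypothetical; only the declaration matches the definition above):

#include <stddef.h>

/* Declaration matching the definition above; provided by the loader.  */
extern void _dl_get_tls_static_info (size_t *sizep, size_t *alignp);

/* Hypothetical helper: how many bytes to reserve so a suitably
   aligned static TLS block is guaranteed to fit.  */
static size_t
static_tls_reservation (void)
{
  size_t size, align;
  _dl_get_tls_static_info (&size, &align);
  return size + align - 1;   /* worst-case padding to reach ALIGN */
}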
/* Derive the location of the pointer to the start of the original
_dl_allocate_tls_storage (void)
{
void *result;
- size_t size = GL(dl_tls_static_size);
+ size_t size = GLRO (dl_tls_static_size);
#if TLS_DTV_AT_TP
/* Memory layout is:
[ TLS_PRE_TCB_SIZE ] [ TLS_TCB_SIZE ] [ TLS blocks ]
                     ^ This should be returned. */
size += TLS_PRE_TCB_SIZE;
#endif
/* Perform the allocation. Reserve space for the required alignment
and the pointer to the original allocation. */
- size_t alignment = GL(dl_tls_static_align);
+ size_t alignment = GLRO (dl_tls_static_align);
void *allocated = malloc (size + alignment + sizeof (void *));
if (__glibc_unlikely (allocated == NULL))
return NULL;
#if TLS_TCB_AT_TP
/* The TCB follows the TLS blocks, which determine the alignment.
(TCB alignment requirements have been taken into account when
- calculating GL(dl_tls_static_align).) */
+ calculating GLRO (dl_tls_static_align).) */
void *aligned = (void *) roundup ((uintptr_t) allocated, alignment);
result = aligned + size - TLS_TCB_SIZE;
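The allocation in _dl_allocate_tls_storage follows the usual over-allocate-and-align pattern: request size + alignment + sizeof (void *), round the pointer up, and remember the original malloc pointer so the block can be freed later (glibc stores that pointer at a location derived from the TCB). A self-contained sketch of the pattern with illustrative names, assuming the alignment is a power of two no smaller than sizeof (void *):

#include <stdint.h>
#include <stdlib.h>
#include <sys/param.h>   /* roundup */

/* Return SIZE bytes aligned to ALIGNMENT; the original malloc pointer is
   stashed directly below the aligned block so it can be freed again.  */
static void *
alloc_aligned_block (size_t size, size_t alignment)
{
  void *allocated = malloc (size + alignment + sizeof (void *));
  if (allocated == NULL)
    return NULL;
  uintptr_t aligned
    = roundup ((uintptr_t) allocated + sizeof (void *), alignment);
  ((void **) aligned)[-1] = allocated;   /* what to pass to free later */
  return (void *) aligned;
}

static void
free_aligned_block (void *block)
{
  free (((void **) block)[-1]);
}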
dl->_dl_hwcap2 = _dl_hwcap2;
extern __typeof (dl->_dl_pagesize) _dl_pagesize attribute_hidden;
dl->_dl_pagesize = _dl_pagesize;
+ extern __typeof (dl->_dl_tls_static_align) _dl_tls_static_align
+ attribute_hidden;
+ dl->_dl_tls_static_align = _dl_tls_static_align;
+ extern __typeof (dl->_dl_tls_static_size) _dl_tls_static_size
+ attribute_hidden;
+ dl->_dl_tls_static_size = _dl_tls_static_size;
__rtld_static_init_arch (map, dl);
}
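The copies in __rtld_static_init above are needed because GLRO does not name the same storage in every build. Roughly (a simplified sketch of the usual ldsodefs.h accessor definitions; the rtld-internal variants are omitted):

/* In a shared build the variables are members of the _rtld_global and
   _rtld_global_ro structures owned by ld.so; in a static build they are
   plain globals in libc, and __rtld_static_init copies them into the
   _rtld_global_ro of a dlopen'ed ld.so.  */
#ifdef SHARED
# define GL(name)   _rtld_global._##name
# define GLRO(name) _rtld_global_ro._##name
#else
# define GL(name)   _##name
# define GLRO(name) _##name
#endif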
} *_dl_tls_dtv_slotinfo_list;
/* Number of modules in the static TLS block. */
EXTERN size_t _dl_tls_static_nelem;
- /* Size of the static TLS block. */
- EXTERN size_t _dl_tls_static_size;
/* Size actually allocated in the static TLS block. */
EXTERN size_t _dl_tls_static_used;
- /* Alignment requirement of the static TLS block. */
- EXTERN size_t _dl_tls_static_align;
/* Remaining amount of static TLS that may be used for optimizing
dynamic TLS access (e.g. with TLSDESC). */
EXTERN size_t _dl_tls_static_optional;
/* -1 if the dynamic linker should honor library load bias,
0 if not, -2 use the default (honor biases for normal
binaries, don't honor for PIEs). */
EXTERN ElfW(Addr) _dl_use_load_bias;
+ /* Size of the static TLS block. */
+ EXTERN size_t _dl_tls_static_size;
+
+ /* Alignment requirement of the static TLS block. */
+ EXTERN size_t _dl_tls_static_align;
+
/* Size of surplus space in the static TLS area for dynamically
loaded modules with IE-model TLS or for TLSDESC optimization.
See comments in elf/dl-tls.c where it is initialized. */