/* Relocate a shared object and resolve its references to other loaded objects.
- Copyright (C) 1995-2020 Free Software Foundation, Inc.
+ Copyright (C) 1995-2023 Free Software Foundation, Inc.
+ Copyright The GNU Toolchain Authors.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
/* If we've already used the variable with dynamic access, or if the
alignment requirements are too high, fail. */
if (map->l_tls_offset == FORCED_DYNAMIC_TLS_OFFSET
- || map->l_tls_align > GL(dl_tls_static_align))
+ || map->l_tls_align > GLRO (dl_tls_static_align))
{
fail:
return -1;
}
#if TLS_TCB_AT_TP
- size_t freebytes = GL(dl_tls_static_size) - GL(dl_tls_static_used);
+ size_t freebytes = GLRO (dl_tls_static_size) - GL(dl_tls_static_used);
if (freebytes < TLS_TCB_SIZE)
goto fail;
freebytes -= TLS_TCB_SIZE;
+ map->l_tls_firstbyte_offset);
size_t used = offset + map->l_tls_blocksize;
- if (used > GL(dl_tls_static_size))
+ if (used > GLRO (dl_tls_static_size))
goto fail;
/* Account optional static TLS surplus usage. */
(void) _dl_update_slotinfo (map->l_tls_modid);
#endif
- GL(dl_init_static_tls) (map);
+ dl_init_static_tls (map);
}
else
map->l_need_tls_init = 1;
}
}
+#if !PTHREAD_IN_LIBC
/* Initialize static TLS area and DTV for current (only) thread.
libpthread implementations should provide their own hook
to handle all threads. */
memset (__mempcpy (dest, map->l_tls_initimage, map->l_tls_initimage_size),
'\0', map->l_tls_blocksize - map->l_tls_initimage_size);
}
+#endif /* !PTHREAD_IN_LIBC */
+/* Resolve the symbol reference *REF for a relocation of type R_TYPE in
+   link map L, searching SCOPE and honoring VERSION (if non-NULL with a
+   nonzero hash).  Returns the link map defining the symbol and updates
+   *REF to the defining symbol table entry.  Uses L's one-entry lookup
+   cache (l_lookup_cache) to short-circuit repeated lookups of the same
+   symbol/type-class pair.  */
+static __always_inline lookup_t
+resolve_map (lookup_t l, struct r_scope_elem *scope[], const ElfW(Sym) **ref,
+ const struct r_found_version *version, unsigned long int r_type)
+{
+  /* STB_LOCAL bindings and symbols whose visibility binds locally
+     always resolve to the containing object itself; no scope search
+     is needed and *REF is left unchanged.  */
+ if (ELFW(ST_BIND) ((*ref)->st_info) == STB_LOCAL
+ || __glibc_unlikely (dl_symbol_visibility_binds_local_p (*ref)))
+ return l;
+
+  /* Cache hit: the previous lookup through this map was for the same
+     symbol and the same relocation type class — reuse its result.  */
+ if (__glibc_unlikely (*ref == l->l_lookup_cache.sym)
+ && elf_machine_type_class (r_type) == l->l_lookup_cache.type_class)
+ {
+ bump_num_cache_relocations ();
+ *ref = l->l_lookup_cache.ret;
+ }
+ else
+ {
+      /* Cache miss: record the key (symbol and type class) first, then
+         perform the full scope lookup and store its result.  */
+ const int tc = elf_machine_type_class (r_type);
+ l->l_lookup_cache.type_class = tc;
+ l->l_lookup_cache.sym = *ref;
+ const char *undef_name
+ = (const char *) D_PTR (l, l_info[DT_STRTAB]) + (*ref)->st_name;
+ const struct r_found_version *v = NULL;
+ if (version != NULL && version->hash != 0)
+ v = version;
+ lookup_t lr = _dl_lookup_symbol_x (
+ undef_name, l, ref, scope, v, tc,
+ DL_LOOKUP_ADD_DEPENDENCY | DL_LOOKUP_FOR_RELOCATE, NULL);
+      /* _dl_lookup_symbol_x updated *REF to the defining symbol;
+         cache both the symbol pointer and the defining map.  */
+ l->l_lookup_cache.ret = *ref;
+ l->l_lookup_cache.value = lr;
+ }
+ return l->l_lookup_cache.value;
+}
+
+/* This macro is used as a callback from the ELF_DYNAMIC_RELOCATE code. */
+#define RESOLVE_MAP resolve_map
+
+#include "dynamic-link.h"
void
_dl_relocate_object (struct link_map *l, struct r_scope_elem *scope[],
int skip_ifunc = reloc_mode & __RTLD_NOIFUNC;
#ifdef SHARED
+ bool consider_symbind = false;
/* If we are auditing, install the same handlers we need for profiling. */
if ((reloc_mode & __RTLD_AUDIT) == 0)
- consider_profiling |= GLRO(dl_audit) != NULL;
+ {
+ struct audit_ifaces *afct = GLRO(dl_audit);
+ for (unsigned int cnt = 0; cnt < GLRO(dl_naudit); ++cnt)
+ {
+ /* Profiling is needed only if PLT hooks are provided. */
+ if (afct->ARCH_LA_PLTENTER != NULL
+ || afct->ARCH_LA_PLTEXIT != NULL)
+ consider_profiling = 1;
+ if (afct->symbind != NULL)
+ consider_symbind = true;
+
+ afct = afct->next;
+ }
+ }
#elif defined PROF
/* Never use dynamic linker profiling for gprof profiling code. */
# define consider_profiling 0
+#else
+# define consider_symbind 0
#endif
if (l->l_relocated)
{
/* Do the actual relocation of the object's GOT and other data. */
- /* String table object symbols. */
- const char *strtab = (const void *) D_PTR (l, l_info[DT_STRTAB]);
-
- /* This macro is used as a callback from the ELF_DYNAMIC_RELOCATE code. */
-#define RESOLVE_MAP(ref, version, r_type) \
- ((ELFW(ST_BIND) ((*ref)->st_info) != STB_LOCAL \
- && __glibc_likely (!dl_symbol_visibility_binds_local_p (*ref))) \
- ? ((__builtin_expect ((*ref) == l->l_lookup_cache.sym, 0) \
- && elf_machine_type_class (r_type) == l->l_lookup_cache.type_class) \
- ? (bump_num_cache_relocations (), \
- (*ref) = l->l_lookup_cache.ret, \
- l->l_lookup_cache.value) \
- : ({ lookup_t _lr; \
- int _tc = elf_machine_type_class (r_type); \
- l->l_lookup_cache.type_class = _tc; \
- l->l_lookup_cache.sym = (*ref); \
- const struct r_found_version *v = NULL; \
- if ((version) != NULL && (version)->hash != 0) \
- v = (version); \
- _lr = _dl_lookup_symbol_x (strtab + (*ref)->st_name, l, (ref), \
- scope, v, _tc, \
- DL_LOOKUP_ADD_DEPENDENCY \
- | DL_LOOKUP_FOR_RELOCATE, NULL); \
- l->l_lookup_cache.ret = (*ref); \
- l->l_lookup_cache.value = _lr; })) \
- : l)
-
-#include "dynamic-link.h"
-
- ELF_DYNAMIC_RELOCATE (l, lazy, consider_profiling, skip_ifunc);
+ ELF_DYNAMIC_RELOCATE (l, scope, lazy, consider_profiling, skip_ifunc);
#ifndef PROF
- if (__glibc_unlikely (consider_profiling)
+ if ((consider_profiling || consider_symbind)
&& l->l_info[DT_PLTRELSZ] != NULL)
{
/* Allocate the array which will contain the already found