+2015-10-17  Florian Weimer  <fweimer@redhat.com>
+
+	malloc: Rewrite with explicit TLS access using __thread.
+	* sysdeps/generic/malloc-machine.h (tsd_key_t, tsd_key_create)
+	(tsd_setspecific, tsd_getspecific): Remove.
+	* sysdeps/mach/hurd/malloc-machine.h (tsd_key_t, tsd_key_create)
+	(tsd_setspecific, tsd_getspecific): Likewise.
+	* sysdeps/nptl/malloc-machine.h (tsd_key_t, tsd_key_create)
+	(tsd_setspecific, tsd_getspecific): Likewise.
+	* malloc/arena.c (thread_arena): New TLS variable.
+	(arena_key): Remove variable.
+	(arena_get): Use thread_arena.
+	(arena_lookup): Remove macro.
+	(malloc_atfork, free_atfork, ptmalloc_lock_all)
+	(ptmalloc_unlock_all, ptmalloc_unlock_all2, ptmalloc_init)
+	(_int_new_arena, get_free_list, reused_arena)
+	(arena_thread_freeres): Use thread_arena.
+	* manual/memory.texi (Basic Allocation): Remove arena_lookup,
+	tsd_getspecific, tsd_setspecific from safety annotations.
+	(Allocating Cleared Space): Remove arena_lookup from safety
+	annotations.
+
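For reference: the removed tsd_* macros abstracted each platform's
thread-specific-data mechanism behind a POSIX-keys-style interface, while
thread_arena is an ordinary __thread variable; attribute_tls_model_ie
requests the initial-exec TLS model, so reads and writes compile to a
constant-offset access rather than a function call.  The standalone sketch
below contrasts the two access styles.  It is illustrative only: the
struct arena type is a hypothetical stand-in for glibc's mstate, and this
is not glibc-internal code.

/* Illustrative comparison of POSIX TSD keys vs. __thread TLS.
   Standalone sketch; "struct arena" stands in for glibc's mstate.
   Compile with: gcc -O2 -pthread tls-sketch.c  */
#include <pthread.h>
#include <stdio.h>

struct arena { int id; };

/* Old style: a process-wide key, one slot per thread, accessed
   through library calls.  */
static pthread_key_t arena_key;

/* New style: a TLS variable; with the initial-exec model the
   compiler emits a direct, constant-offset load.  */
static __thread struct arena *thread_arena
  __attribute__ ((tls_model ("initial-exec")));

static void *
worker (void *arg)
{
  struct arena a = { .id = (int) (long) arg };

  /* TSD: store and load go through function calls.  */
  pthread_setspecific (arena_key, &a);
  struct arena *via_tsd = pthread_getspecific (arena_key);

  /* TLS: plain assignment and read of a per-thread variable.  */
  thread_arena = &a;
  struct arena *via_tls = thread_arena;

  printf ("thread %d: tsd=%d tls=%d\n", a.id, via_tsd->id, via_tls->id);
  return NULL;
}

int
main (void)
{
  pthread_key_create (&arena_key, NULL);
  pthread_t t1, t2;
  pthread_create (&t1, NULL, worker, (void *) 1L);
  pthread_create (&t2, NULL, worker, (void *) 2L);
  pthread_join (t1, NULL);
  pthread_join (t2, NULL);
  pthread_key_delete (&arena_key);
  return 0;
}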
2015-10-17  Florian Weimer  <fweimer@redhat.com>

	* stdio-common/vfprintf.c (printf_positional): Rewrite to use
--- a/malloc/arena.c
+++ b/malloc/arena.c
extern int sanity_check_heap_info_alignment[(sizeof (heap_info)
					     + 2 * SIZE_SZ) % MALLOC_ALIGNMENT
					    ? -1 : 1];
-/* Thread specific data */
+/* Thread specific data.  */
+
+static __thread mstate thread_arena attribute_tls_model_ie;
+
+/* Arena free list.  */

-static tsd_key_t arena_key;
static mutex_t list_lock = MUTEX_INITIALIZER;
static size_t narenas = 1;
static mstate free_list;
in the new arena. */
#define arena_get(ptr, size) do { \
- arena_lookup (ptr); \
+ ptr = thread_arena; \
arena_lock (ptr, size); \
} while (0)
-#define arena_lookup(ptr) do { \
- void *vptr = NULL; \
- ptr = (mstate) tsd_getspecific (arena_key, vptr); \
- } while (0)
-
#define arena_lock(ptr, size) do { \
if (ptr && !arena_is_corrupt (ptr)) \
(void) mutex_lock (&ptr->mutex); \
static void *
malloc_atfork (size_t sz, const void *caller)
{
- void *vptr = NULL;
void *victim;
- tsd_getspecific (arena_key, vptr);
- if (vptr == ATFORK_ARENA_PTR)
+ if (thread_arena == ATFORK_ARENA_PTR)
{
/* We are the only thread that may allocate at all. */
if (save_malloc_hook != malloc_check)
static void
free_atfork (void *mem, const void *caller)
{
- void *vptr = NULL;
mstate ar_ptr;
mchunkptr p; /* chunk corresponding to mem */
}
ar_ptr = arena_for_chunk (p);
- tsd_getspecific (arena_key, vptr);
- _int_free (ar_ptr, p, vptr == ATFORK_ARENA_PTR);
+ _int_free (ar_ptr, p, thread_arena == ATFORK_ARENA_PTR);
}
if (mutex_trylock (&list_lock))
{
- void *my_arena;
- tsd_getspecific (arena_key, my_arena);
- if (my_arena == ATFORK_ARENA_PTR)
+ if (thread_arena == ATFORK_ARENA_PTR)
/* This is the same thread which already locks the global list.
Just bump the counter. */
goto out;
__malloc_hook = malloc_atfork;
__free_hook = free_atfork;
/* Only the current thread may perform malloc/free calls now. */
- tsd_getspecific (arena_key, save_arena);
- tsd_setspecific (arena_key, ATFORK_ARENA_PTR);
+ save_arena = thread_arena;
+ thread_arena = ATFORK_ARENA_PTR;
out:
++atfork_recursive_cntr;
}
if (--atfork_recursive_cntr != 0)
return;
- tsd_setspecific (arena_key, save_arena);
+ thread_arena = save_arena;
__malloc_hook = save_malloc_hook;
__free_hook = save_free_hook;
for (ar_ptr = &main_arena;; )
if (__malloc_initialized < 1)
return;
- tsd_setspecific (arena_key, save_arena);
+ thread_arena = save_arena;
__malloc_hook = save_malloc_hook;
__free_hook = save_free_hook;
free_list = NULL;
__morecore = __failing_morecore;
#endif
- tsd_key_create (&arena_key, NULL);
- tsd_setspecific (arena_key, (void *) &main_arena);
+ thread_arena = &main_arena;
thread_atfork (ptmalloc_lock_all, ptmalloc_unlock_all, ptmalloc_unlock_all2);
const char *s = NULL;
if (__glibc_likely (_environ != NULL))
set_head (top (a), (((char *) h + h->size) - ptr) | PREV_INUSE);
LIBC_PROBE (memory_arena_new, 2, a, size);
- tsd_setspecific (arena_key, (void *) a);
+ thread_arena = a;
mutex_init (&a->mutex);
(void) mutex_lock (&a->mutex);
{
LIBC_PROBE (memory_arena_reuse_free_list, 1, result);
(void) mutex_lock (&result->mutex);
- tsd_setspecific (arena_key, (void *) result);
+ thread_arena = result;
}
}
out:
LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena);
- tsd_setspecific (arena_key, (void *) result);
+ thread_arena = result;
next_to_use = result->next;
return result;
static void __attribute__ ((section ("__libc_thread_freeres_fn")))
arena_thread_freeres (void)
{
- void *vptr = NULL;
- mstate a = tsd_getspecific (arena_key, vptr);
- tsd_setspecific (arena_key, NULL);
+ mstate a = thread_arena;
+ thread_arena = NULL;
if (a != NULL)
{
--- a/manual/memory.texi
+++ b/manual/memory.texi
@c __libc_malloc @asulock @aculock @acsfd @acsmem
@c force_reg ok
@c *malloc_hook unguarded
-@c arena_lookup ok
-@c tsd_getspecific ok, TLS
@c arena_lock @asulock @aculock @acsfd @acsmem
@c mutex_lock @asulock @aculock
@c arena_get2 @asulock @aculock @acsfd @acsmem
@c mutex_lock (list_lock) dup @asulock @aculock
@c mutex_unlock (list_lock) dup @aculock
@c mutex_lock (arena lock) dup @asulock @aculock [returns locked]
-@c tsd_setspecific ok, TLS
@c __get_nprocs ext ok @acsfd
@c NARENAS_FROM_NCORES ok
@c catomic_compare_and_exchange_bool_acq ok
@c *__malloc_hook dup unguarded
@c memset dup ok
@c arena_get @asulock @aculock @acsfd @acsmem
-@c arena_lookup dup ok
@c arena_lock dup @asulock @aculock @acsfd @acsmem
@c top dup ok
@c chunksize dup ok
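
A note on thread-exit cleanup: the arena_thread_freeres hunk above survives
this patch because arena cleanup never ran through a TSD destructor (the
removed tsd_key_create call passed NULL for the destructor), and a plain
__thread variable offers no destructor either, so glibc keeps detaching the
arena through its internal __libc_thread_freeres_fn section.  Outside glibc,
the portable analogue of such per-thread cleanup is a TSD key destructor.
A minimal sketch, with hypothetical names (detach_arena, cleanup_key) and
"struct arena" again standing in for mstate:

/* Portable per-thread cleanup via a TSD key destructor, the
   mechanism __thread variables lack.  Sketch only.
   Compile with: gcc -pthread cleanup-sketch.c  */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

struct arena { int id; };

static pthread_key_t cleanup_key;

/* Runs automatically when a thread that set the key exits with a
   non-NULL value stored.  */
static void
detach_arena (void *p)
{
  struct arena *a = p;
  printf ("thread exiting, detaching arena %d\n", a->id);
  free (a);
}

static void *
worker (void *arg)
{
  struct arena *a = malloc (sizeof *a);
  a->id = (int) (long) arg;
  /* Storing a value arms the destructor for this thread.  */
  pthread_setspecific (cleanup_key, a);
  return NULL;  /* detach_arena runs after the thread returns.  */
}

int
main (void)
{
  pthread_key_create (&cleanup_key, detach_arena);
  pthread_t t;
  pthread_create (&t, NULL, worker, (void *) 7L);
  pthread_join (t, NULL);
  return 0;
}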