From: Wilco Dijkstra
Date: Thu, 1 May 2025 19:58:38 +0000 (+0000)
Subject: malloc: Improve malloc initialization
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=25d37948c9f3;p=thirdparty%2Fglibc.git

malloc: Improve malloc initialization

Move malloc initialization to __libc_early_init.  Use a hidden
__ptmalloc_init for initialization and a weak call to avoid pulling in
the system malloc in a static binary.  All previous initialization
checks can now be removed.

Reviewed-by: Florian Weimer
---

diff --git a/elf/libc_early_init.c b/elf/libc_early_init.c
index 0720231753..24b99d82fe 100644
--- a/elf/libc_early_init.c
+++ b/elf/libc_early_init.c
@@ -24,6 +24,7 @@
 #include 
 #include 
 #include 
+#include <malloc-internal.h>
 
 #ifdef SHARED
 _Bool __libc_initial;
@@ -32,6 +33,9 @@ _Bool __libc_initial;
 void
 __libc_early_init (_Bool initial)
 {
+  /* Initialize system malloc. */
+  call_function_static_weak (__ptmalloc_init);
+
   /* Initialize ctype data. */
   __ctype_init ();
 
diff --git a/malloc/arena.c b/malloc/arena.c
index 5672c699aa..90c526f23b 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -113,9 +113,6 @@ static mstate free_list;
    acquired. */
 __libc_lock_define_initialized (static, list_lock);
 
-/* Already initialized? */
-static bool __malloc_initialized = false;
-
 /**************************************************************************/
 
 
@@ -168,9 +165,6 @@ arena_for_chunk (mchunkptr ptr)
 void
 __malloc_fork_lock_parent (void)
 {
-  if (!__malloc_initialized)
-    return;
-
   /* We do not acquire free_list_lock here because we completely
      reconstruct free_list in __malloc_fork_unlock_child. */
 
@@ -188,9 +182,6 @@ __malloc_fork_lock_parent (void)
 void
 __malloc_fork_unlock_parent (void)
 {
-  if (!__malloc_initialized)
-    return;
-
   for (mstate ar_ptr = &main_arena;; )
     {
       __libc_lock_unlock (ar_ptr->mutex);
@@ -204,9 +195,6 @@ __malloc_fork_unlock_parent (void)
 void
 __malloc_fork_unlock_child (void)
 {
-  if (!__malloc_initialized)
-    return;
-
   /* Push all arenas to the free list, except thread_arena, which is
      attached to the current thread. */
   __libc_lock_init (free_list_lock);
@@ -259,14 +247,9 @@ TUNABLE_CALLBACK_FNDECL (set_hugetlb, size_t)
 static void tcache_key_initialize (void);
 #endif
 
-static void
-ptmalloc_init (void)
+void
+__ptmalloc_init (void)
 {
-  if (__malloc_initialized)
-    return;
-
-  __malloc_initialized = true;
-
 #if USE_TCACHE
   tcache_key_initialize ();
 #endif
diff --git a/malloc/malloc-check.c b/malloc/malloc-check.c
index c5265ecb91..fbb030116c 100644
--- a/malloc/malloc-check.c
+++ b/malloc/malloc-check.c
@@ -389,7 +389,7 @@ initialize_malloc_check (void)
 {
   /* This is the copy of the malloc initializer that we pulled in along with
      malloc-check.  This does not affect any of the libc malloc structures. */
-  ptmalloc_init ();
+  __ptmalloc_init ();
   TUNABLE_GET (check, int32_t, TUNABLE_CALLBACK (set_mallopt_check));
   return __is_malloc_debug_enabled (MALLOC_CHECK_HOOK);
 }
diff --git a/malloc/malloc-internal.h b/malloc/malloc-internal.h
index d88ed20c46..0f1b3a1d5d 100644
--- a/malloc/malloc-internal.h
+++ b/malloc/malloc-internal.h
@@ -40,4 +40,7 @@ void __malloc_arena_thread_freeres (void) attribute_hidden;
 /* Activate a standard set of debugging hooks. */
 void __malloc_check_init (void) attribute_hidden;
 
+/* Initialize malloc. */
+void __ptmalloc_init (void) attribute_hidden;
+
 #endif /* _MALLOC_INTERNAL_H */
diff --git a/malloc/malloc.c b/malloc/malloc.c
index 9f44f5ab07..afb74d0665 100644
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1937,7 +1937,7 @@ static struct malloc_par mp_ =
 /*
    Initialize a malloc_state struct.
 
-   This is called from ptmalloc_init () or from _int_new_arena ()
+   This is called from __ptmalloc_init () or from _int_new_arena ()
    when creating a new arena.
  */
 
@@ -3347,9 +3347,6 @@ __libc_malloc2 (size_t bytes)
   mstate ar_ptr;
   void *victim;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   MAYBE_INIT_TCACHE ();
 
   if (SINGLE_THREAD_P)
@@ -3455,9 +3452,6 @@ __libc_realloc (void *oldmem, size_t bytes)
   void *newp;             /* chunk to return */
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
 #if REALLOC_ZERO_BYTES_FREES
   if (bytes == 0 && oldmem != NULL)
     {
@@ -3583,9 +3577,6 @@ libc_hidden_def (__libc_realloc)
 void *
 __libc_memalign (size_t alignment, size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   void *address = RETURN_ADDRESS (0);
   return _mid_memalign (alignment, bytes, address);
 }
@@ -3596,9 +3587,6 @@ void *
 weak_function
 aligned_alloc (size_t alignment, size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   /* Similar to memalign, but starting with ISO C17 the standard
      requires an error for alignments that are not supported by the
      implementation.  Valid alignments for the current implementation
@@ -3698,9 +3686,6 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
 void *
 __libc_valloc (size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   void *address = RETURN_ADDRESS (0);
   size_t pagesize = GLRO (dl_pagesize);
   return _mid_memalign (pagesize, bytes, address);
@@ -3709,9 +3694,6 @@ __libc_valloc (size_t bytes)
 void *
 __libc_pvalloc (size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   void *address = RETURN_ADDRESS (0);
   size_t pagesize = GLRO (dl_pagesize);
   size_t rounded_bytes;
@@ -3746,9 +3728,6 @@ __libc_calloc (size_t n, size_t elem_size)
 
   sz = bytes;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
 #if USE_TCACHE
   size_t tc_idx = usize2tidx (bytes);
   if (tcache_available (tc_idx))
@@ -5211,9 +5190,6 @@ __malloc_trim (size_t s)
 {
   int result = 0;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   mstate ar_ptr = &main_arena;
   do
     {
@@ -5330,9 +5306,6 @@ __libc_mallinfo2 (void)
   struct mallinfo2 m;
   mstate ar_ptr;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   memset (&m, 0, sizeof (m));
   ar_ptr = &main_arena;
   do
@@ -5381,8 +5354,6 @@ __malloc_stats (void)
   mstate ar_ptr;
   unsigned int in_use_b = mp_.mmapped_mem, system_b = in_use_b;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
   _IO_flockfile (stderr);
   int old_flags2 = stderr->_flags2;
   stderr->_flags2 |= _IO_FLAGS2_NOTCANCEL;
@@ -5563,8 +5534,6 @@ __libc_mallopt (int param_number, int value)
   mstate av = &main_arena;
   int res = 1;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
   __libc_lock_lock (av->mutex);
 
   LIBC_PROBE (memory_mallopt, 2, param_number, value);
@@ -5780,11 +5749,14 @@ malloc_printerr (const char *str)
 }
 
 #if USE_TCACHE
+
+static volatile int dummy_var;
+
 static __attribute_noinline__ void
 malloc_printerr_tail (const char *str)
 {
   /* Ensure this cannot be a no-return function. */
-  if (!__malloc_initialized)
+  if (dummy_var)
     return;
   malloc_printerr (str);
 }
@@ -5797,9 +5769,6 @@ __posix_memalign (void **memptr, size_t alignment, size_t size)
 {
   void *mem;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   /* Test whether the SIZE argument is valid.  It must be a power of
     two multiple of sizeof (void *). */
   if (alignment % sizeof (void *) != 0
@@ -5840,11 +5809,6 @@ __malloc_info (int options, FILE *fp)
   size_t total_aspace = 0;
   size_t total_aspace_mprotect = 0;
 
-
-
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   fputs ("<malloc version=\"1\">\n", fp);
 
   /* Iterate over all arenas currently in use. */
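
The weak call in __libc_early_init is what keeps the system malloc out of
a static binary that never references it: call_function_static_weak
declares its target as a weak symbol and calls it only if the symbol was
actually linked in.  The following is a minimal, stand-alone sketch of
that pattern for illustration only; the file and function names
(weak_call_sketch.c, optional_init) are hypothetical and this is not the
exact definition of glibc's internal macro.

    /* weak_call_sketch.c - sketch of the weak-call pattern.
       Build with: gcc weak_call_sketch.c -o weak_call_sketch  */
    #include <stdio.h>

    /* Weak reference: resolves to NULL unless some object that defines
       optional_init is linked into the program.  */
    extern void optional_init (void) __attribute__ ((weak));

    int
    main (void)
    {
      if (optional_init != NULL)   /* Call only if it was linked in.  */
        optional_init ();
      else
        puts ("optional_init not linked in; skipping");
      return 0;
    }

Because __ptmalloc_init now runs unconditionally this early, the
per-function "if (!__malloc_initialized) ptmalloc_init ();" guards that
the rest of the patch deletes are no longer needed.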