1 diff -rup a/malloc/arena.c b/malloc/arena.c
2 --- a/malloc/arena.c 2012-03-02 10:22:47.025002715 -0700
3 +++ b/malloc/arena.c 2012-03-02 10:27:47.442361529 -0700
4 @@ -123,14 +123,14 @@ int __malloc_initialized = -1;
6 (void)mutex_lock(&ptr->mutex); \
8 - ptr = arena_get2(ptr, (size)); \
9 + ptr = arena_get2(ptr, (size), false); \
12 #define arena_lock(ptr, size) do { \
13 if(ptr && !mutex_trylock(&ptr->mutex)) { \
14 THREAD_STAT(++(ptr->stat_lock_direct)); \
16 - ptr = arena_get2(ptr, (size)); \
17 + ptr = arena_get2(ptr, (size), false); \
21 @@ -982,7 +982,7 @@ get_free_list (void)
26 +reused_arena (bool retrying)
29 static mstate next_to_use;
30 @@ -999,6 +999,15 @@ reused_arena (void)
32 while (result != next_to_use);
34 + /* If we are retrying due to a failure to allocate in the main
35 + arena, don't wait for the main arena to become available, select
36 + the next arena in the loop instead.  There is no guarantee that we
37 + will get memory from that arena, but it at least gives us a chance.
38 + To really fix this right we would have to try the allocation
39 + in every other arena, but that seems like severe overkill. */
40 + if (retrying && result == &main_arena)
41 + result = result->next;
43 /* No arena available. Wait for the next in line. */
44 (void)mutex_lock(&result->mutex);
46 @@ -1014,9 +1023,9 @@ reused_arena (void)
50 -arena_get2(mstate a_tsd, size_t size)
51 +arena_get2(mstate a_tsd, size_t size, bool retrying)
53 -arena_get2(a_tsd, size) mstate a_tsd; size_t size;
54 +arena_get2(a_tsd, size, retrying) mstate a_tsd; size_t size; bool retrying;
58 @@ -1055,7 +1064,7 @@ arena_get2(a_tsd, size) mstate a_tsd; si
59 catomic_decrement (&narenas);
62 - a = reused_arena ();
63 + a = reused_arena (retrying);
67 diff -rup a/malloc/malloc.c b/malloc/malloc.c
68 --- a/malloc/malloc.c 2012-03-02 10:22:47.061002519 -0700
69 +++ b/malloc/malloc.c 2012-03-02 10:23:53.151643863 -0700
70 @@ -3671,7 +3671,7 @@ public_mALLOc(size_t bytes)
71 /* ... or sbrk() has failed and there is still a chance to mmap() */
72 mstate prev = ar_ptr->next ? ar_ptr : 0;
73 (void)mutex_unlock(&ar_ptr->mutex);
74 - ar_ptr = arena_get2(prev, bytes);
75 + ar_ptr = arena_get2(prev, bytes, true);
77 victim = _int_malloc(ar_ptr, bytes);
78 (void)mutex_unlock(&ar_ptr->mutex);
79 @@ -3892,7 +3892,7 @@ public_mEMALIGn(size_t alignment, size_t
80 /* ... or sbrk() has failed and there is still a chance to mmap() */
81 mstate prev = ar_ptr->next ? ar_ptr : 0;
82 (void)mutex_unlock(&ar_ptr->mutex);
83 - ar_ptr = arena_get2(prev, bytes);
84 + ar_ptr = arena_get2(prev, bytes, true);
86 p = _int_memalign(ar_ptr, alignment, bytes);
87 (void)mutex_unlock(&ar_ptr->mutex);
88 @@ -3943,7 +3943,7 @@ public_vALLOc(size_t bytes)
89 /* ... or sbrk() has failed and there is still a chance to mmap() */
90 mstate prev = ar_ptr->next ? ar_ptr : 0;
91 (void)mutex_unlock(&ar_ptr->mutex);
92 - ar_ptr = arena_get2(prev, bytes);
93 + ar_ptr = arena_get2(prev, bytes, true);
95 p = _int_memalign(ar_ptr, pagesz, bytes);
96 (void)mutex_unlock(&ar_ptr->mutex);
97 @@ -3992,7 +3992,7 @@ public_pVALLOc(size_t bytes)
98 /* ... or sbrk() has failed and there is still a chance to mmap() */
99 mstate prev = ar_ptr->next ? ar_ptr : 0;
100 (void)mutex_unlock(&ar_ptr->mutex);
101 - ar_ptr = arena_get2(prev, bytes + 2*pagesz + MINSIZE);
102 + ar_ptr = arena_get2(prev, bytes + 2*pagesz + MINSIZE, true);
104 p = _int_memalign(ar_ptr, pagesz, rounded_bytes);
105 (void)mutex_unlock(&ar_ptr->mutex);
106 @@ -4086,7 +4086,7 @@ public_cALLOc(size_t n, size_t elem_size
107 /* ... or sbrk() has failed and there is still a chance to mmap() */
108 mstate prev = av->next ? av : 0;
109 (void)mutex_unlock(&av->mutex);
110 - av = arena_get2(prev, sz);
111 + av = arena_get2(prev, sz, true);
113 mem = _int_malloc(av, sz);
114 (void)mutex_unlock(&av->mutex);