size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_MALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
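
The same pattern repeats for every wrapped allocator below: whenever the calling thread currently holds at least one lock, the bin's locked_calls counter is bumped alongside the usual alloc/free counters. The following standalone sketch is not the patched HAProxy code; the simplified bin_stats and thread_ctx types and the account_alloc() helper are assumptions for illustration only, and it assumes the low 7 bits of lock_level track the current lock nesting depth (hence the 0x7F mask):

/* Minimal standalone sketch of the "count calls made under a lock" pattern.
 * Types and names are simplified stand-ins, not the real HAProxy structures.
 */
#include <stdatomic.h>
#include <stdio.h>

struct bin_stats {
    atomic_ullong alloc_calls;   /* total allocation calls for this call site */
    atomic_ullong locked_calls;  /* subset of calls made while a lock was held */
};

struct thread_ctx {
    unsigned char lock_level;    /* assumed: low 7 bits = current lock nesting depth */
};

static void account_alloc(struct bin_stats *bin, const struct thread_ctx *th_ctx)
{
    /* if any lock is currently held, also bump the locked counter */
    if (th_ctx->lock_level & 0x7F)
        atomic_fetch_add(&bin->locked_calls, 1);
    atomic_fetch_add(&bin->alloc_calls, 1);
}

int main(void)
{
    struct bin_stats bin = {0};
    struct thread_ctx ctx = { .lock_level = 0 };

    account_alloc(&bin, &ctx);   /* unlocked call */
    ctx.lock_level = 1;          /* pretend a lock is now held */
    account_alloc(&bin, &ctx);   /* locked call */

    printf("alloc=%llu locked=%llu\n",
           (unsigned long long)atomic_load(&bin.alloc_calls),
           (unsigned long long)atomic_load(&bin.locked_calls));
    return 0;
}

Run standalone this prints "alloc=2 locked=1", i.e. one of the two recorded calls happened while a lock was held.
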
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_CALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
size += sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_REALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
if (size > size_before) {
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size - size_before);
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_STRDUP);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
memprof_free_handler(ptr);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_FREE);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->free_calls, 1);
_HA_ATOMIC_ADD(&bin->free_tot, size_before);
}
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_STRNDUP);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_VALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_PVALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_MEMALIGN);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
size = malloc_usable_size(ret) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_ALIGNED_ALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
size = malloc_usable_size(*ptr) + sizeof(void *);
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_POSIX_MEMALIGN);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, size);
return ret;
/* also flush current profiling stats */
for (i = 0; i < sizeof(memprof_stats) / sizeof(memprof_stats[0]); i++) {
+ HA_ATOMIC_STORE(&memprof_stats[i].locked_calls, 0);
HA_ATOMIC_STORE(&memprof_stats[i].alloc_calls, 0);
HA_ATOMIC_STORE(&memprof_stats[i].free_calls, 0);
HA_ATOMIC_STORE(&memprof_stats[i].alloc_tot, 0);
chunk_appendf(&trash," [pool=%s]", pool->name);
}
+ if (entry->locked_calls) {
+ unsigned long long tot_calls = entry->alloc_calls + entry->free_calls;
+
+ chunk_appendf(&trash," [locked=%llu (%d.%1d %%)]",
+ entry->locked_calls,
+ (int)(100ULL * entry->locked_calls / tot_calls),
+ (int)((1000ULL * entry->locked_calls / tot_calls) % 10));
+ }
+
chunk_appendf(&trash, "\n");
if (applet_putchk(appctx, &trash) == -1)
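
The locked-call share above is rendered with one decimal place using integer arithmetic only: the integer percent is 100 * locked / tot, and the tenths digit is (1000 * locked / tot) modulo 10. A small sketch with made-up numbers (37 locked calls out of 200 total) shows the same formatting in isolation:

/* Sketch of the integer-only percentage formatting used in the dump above.
 * The counter values are invented for the example.
 */
#include <stdio.h>

int main(void)
{
    unsigned long long locked_calls = 37, tot_calls = 200;

    printf("[locked=%llu (%d.%1d %%)]\n",
           locked_calls,
           (int)(100ULL  * locked_calls / tot_calls),        /* integer percent: 18 */
           (int)((1000ULL * locked_calls / tot_calls) % 10)); /* tenths digit: 5 */
    return 0;
}

This prints "[locked=37 (18.5 %)]", matching 37/200 = 18.5 %. Note that tot_calls can only be zero when locked_calls is zero, since locked_calls is only ever incremented next to an alloc_calls or free_calls increment, so the division is never reached with a zero divisor.
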
uint64_t mem_wait_start = 0;
int isolated = thread_isolated();
- if (th_ctx->flags & TH_FL_TASK_PROFILING)
+ if (unlikely(th_ctx->flags & TH_FL_TASK_PROFILING))
mem_wait_start = now_mono_time();
if (!isolated)
struct memprof_stats *bin;
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_P_ALLOC);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_calls, 1);
_HA_ATOMIC_ADD(&bin->alloc_tot, pool->size);
_HA_ATOMIC_STORE(&bin->info, pool);
struct memprof_stats *bin;
bin = memprof_get_bin(__builtin_return_address(0), MEMPROF_METH_P_FREE);
+ if (unlikely(th_ctx->lock_level & 0x7F))
+ _HA_ATOMIC_ADD(&bin->locked_calls, 1);
_HA_ATOMIC_ADD(&bin->free_calls, 1);
_HA_ATOMIC_ADD(&bin->free_tot, pool->size);
_HA_ATOMIC_STORE(&bin->info, pool);