return VG_(arena_strdup) ( VG_AR_CORE, cc, s );
}
-// Useful for querying user blocks.
-SizeT VG_(malloc_usable_size) ( void* p )
-{
- return VG_(arena_malloc_usable_size)(VG_AR_CLIENT, p);
-}
-
void* VG_(perm_malloc) ( SizeT size, Int align )
{
return VG_(arena_perm_malloc) ( VG_AR_CORE, size, align );
VG_(arena_free) ( VG_AR_CLIENT, p );
}
+// Useful for querying user blocks.
+SizeT VG_(cli_malloc_usable_size) ( void* p )
+{
+ return VG_(arena_malloc_usable_size)(VG_AR_CLIENT, p);
+}
+
// True iff address 'a' lies within the block [start, start+size),
// widened on both sides by the redzone size rz_szB.
Bool VG_(addr_is_in_block)( Addr a, Addr start, SizeT size, SizeT rz_szB )
{
   return ( start - rz_szB <= a && a < start + size + rz_szB );
}
return NULL;
}
if (is_zeroed) VG_(memset)(p, 0, req_szB);
- actual_szB = VG_(malloc_usable_size)(p);
+ actual_szB = VG_(cli_malloc_usable_size)(p);
tl_assert(actual_szB >= req_szB);
/* slop_szB = actual_szB - req_szB; */
} else {
extern void* VG_(realloc) ( const HChar* cc, void* p, SizeT size );
extern HChar* VG_(strdup) ( const HChar* cc, const HChar* s );
// TODO: move somewhere else
// Call here to bomb the system when out of memory (mmap anon fails)
__attribute__((noreturn))
// on a multiple of align.
// Use the macro vg_alignof (type) to get a safe alignment for a type.
// No other function can be used on these permanently allocated blocks.
// In particular, do *not* call VG_(free) or VG_(realloc).
// Technically, these blocks will be returned from big superblocks
// only containing such permanently allocated blocks.
// Note that there is no cc cost centre : all such blocks will be
* alloc/freeing. */
extern void* VG_(cli_malloc) ( SizeT align, SizeT nbytes );
extern void VG_(cli_free) ( void* p );
+// Returns the usable size of a heap-block. It's the asked-for size plus
+// possibly some more due to rounding up.
+extern SizeT VG_(cli_malloc_usable_size)( void* p );
+
/* If a tool uses deferred freeing (e.g. memcheck to catch accesses to
freed memory) it can maintain number and total size of queued blocks
return NULL;
}
if (is_zeroed) VG_(memset)(p, 0, req_szB);
- actual_szB = VG_(malloc_usable_size)(p);
+ actual_szB = VG_(cli_malloc_usable_size)(p);
tl_assert(actual_szB >= req_szB);
slop_szB = actual_szB - req_szB;
}
VG_(memcpy)(p_new, p_old, old_req_szB + old_slop_szB);
VG_(cli_free)(p_old);
- new_actual_szB = VG_(malloc_usable_size)(p_new);
+ new_actual_szB = VG_(cli_malloc_usable_size)(p_new);
tl_assert(new_actual_szB >= new_req_szB);
new_slop_szB = new_actual_szB - new_req_szB;
}