In the core, include malloc_usable_size() as one of the functions that must
be replaced if malloc() et al are replaced by a tool.  This is because
different tools implement the function in different ways.

Add an appropriate malloc_usable_size() replacement to each of Memcheck,
Helgrind, DRD, Ptrcheck, Massif.

Update memcheck/tests/malloc_usable and add massif/tests/malloc_usable.

Merged from the DARWIN branch.


git-svn-id: svn://svn.valgrind.org/valgrind/trunk@9193
This commit is contained in:
Nicholas Nethercote 2009-02-17 04:31:18 +00:00
parent b327b8dd60
commit 3ed4532cde
22 changed files with 93 additions and 9 deletions

View File

@ -1664,8 +1664,7 @@ void* VG_(arena_memalign) ( ArenaId aid, HChar* cc,
}
// The ThreadId doesn't matter, it's not used.
SizeT VG_(arena_payload_szB) ( ThreadId tid, ArenaId aid, void* ptr )
SizeT VG_(arena_malloc_usable_size) ( ArenaId aid, void* ptr )
{
Arena* a = arenaId_to_ArenaP(aid);
Block* b = get_payload_block(a, ptr);
@ -1853,7 +1852,7 @@ Char* VG_(strdup) ( HChar* cc, const Char* s )
// Useful for querying user blocks.
SizeT VG_(malloc_usable_size) ( void* p )
{
return VG_(arena_payload_szB)(VG_INVALID_THREADID, VG_AR_CLIENT, p);
return VG_(arena_malloc_usable_size)(VG_AR_CLIENT, p);
}

View File

@ -559,8 +559,7 @@ POSIX_MEMALIGN(VG_Z_LIBC_SONAME, memalign_common);
if (NULL == p) \
return 0; \
\
pszB = (SizeT)VALGRIND_NON_SIMD_CALL2( info.arena_payload_szB, \
VG_AR_CLIENT, p ); \
pszB = (SizeT)VALGRIND_NON_SIMD_CALL1( info.tl_malloc_usable_size, p ); \
MALLOC_TRACE(" = %llu", (ULong)pszB ); \
\
return pszB; \

View File

@ -1372,8 +1372,8 @@ void do_client_request ( ThreadId tid )
info->tl_free = VG_(tdict).tool_free;
info->tl___builtin_delete = VG_(tdict).tool___builtin_delete;
info->tl___builtin_vec_delete = VG_(tdict).tool___builtin_vec_delete;
info->tl_malloc_usable_size = VG_(tdict).tool_malloc_usable_size;
info->arena_payload_szB = VG_(arena_payload_szB);
info->mallinfo = VG_(mallinfo);
info->clo_trace_malloc = VG_(clo_trace_malloc);

View File

@ -298,6 +298,7 @@ void VG_(needs_malloc_replacement)(
void (*__builtin_delete) ( ThreadId, void* ),
void (*__builtin_vec_delete) ( ThreadId, void* ),
void* (*realloc) ( ThreadId, void*, SizeT ),
SizeT (*malloc_usable_size) ( ThreadId, void* ),
SizeT client_malloc_redzone_szB
)
{
@ -311,6 +312,7 @@ void VG_(needs_malloc_replacement)(
VG_(tdict).tool___builtin_delete = __builtin_delete;
VG_(tdict).tool___builtin_vec_delete = __builtin_vec_delete;
VG_(tdict).tool_realloc = realloc;
VG_(tdict).tool_malloc_usable_size = malloc_usable_size;
VG_(tdict).tool_client_redzone_szB = client_malloc_redzone_szB;
}

View File

@ -100,6 +100,8 @@ extern Char* VG_(arena_strdup) ( ArenaId aid, HChar* cc,
// Nb: The ThreadId doesn't matter, it's not used.
extern SizeT VG_(arena_payload_szB) ( ThreadId tid, ArenaId aid, void* payload );
extern SizeT VG_(arena_malloc_usable_size) ( ArenaId aid, void* payload );
extern void VG_(mallinfo) ( ThreadId tid, struct vg_mallinfo* mi );
extern void VG_(sanity_check_malloc_all) ( void );

View File

@ -49,7 +49,7 @@ struct vg_mallocfunc_info {
void (*tl___builtin_delete) (ThreadId tid, void* p);
void (*tl___builtin_vec_delete)(ThreadId tid, void* p);
void* (*tl_realloc) (ThreadId tid, void* p, SizeT size);
SizeT (*arena_payload_szB) (ThreadId tid, ArenaId aid, void* payload);
SizeT (*tl_malloc_usable_size) (ThreadId tid, void* payload);
void (*mallinfo) (ThreadId tid, struct vg_mallinfo* mi);
Bool clo_trace_malloc;
};

View File

@ -155,6 +155,7 @@ typedef struct {
void (*tool___builtin_delete) (ThreadId, void*);
void (*tool___builtin_vec_delete)(ThreadId, void*);
void* (*tool_realloc) (ThreadId, void*, SizeT);
SizeT (*tool_malloc_usable_size) (ThreadId, void*);
SizeT tool_client_redzone_szB;
// VG_(needs).final_IR_tidy_pass

View File

@ -243,6 +243,15 @@ static void DRD_(__builtin_vec_delete)(ThreadId tid, void* p)
DRD_(handle_free)(tid, (Addr)p);
}
static SizeT DRD_(malloc_usable_size) ( ThreadId tid, void* p )
{
DRD_Chunk *mc = VG_(HT_lookup)( DRD_(s_malloc_list), (UWord)p );
// There may be slop, but pretend there isn't because only the asked-for
// area will have been shadowed properly.
return ( mc ? mc->size : 0 );
}
void DRD_(register_malloc_wrappers)(const StartUsingMem start_callback,
const StopUsingMem stop_callback)
{
@ -264,6 +273,7 @@ void DRD_(register_malloc_wrappers)(const StartUsingMem start_callback,
DRD_(__builtin_delete),
DRD_(__builtin_vec_delete),
DRD_(realloc),
DRD_(malloc_usable_size),
0);
}

View File

@ -999,6 +999,15 @@ void* h_replace_realloc ( ThreadId tid, void* p_old, SizeT new_size )
}
}
SizeT h_replace_malloc_usable_size ( ThreadId tid, void* p )
{
Seg* seg = find_Seg_by_addr( (Addr)p );
// There may be slop, but pretend there isn't because only the asked-for
// area will have been shadowed properly.
return ( seg ? seg->szB : 0 );
}
/*------------------------------------------------------------*/
/*--- Memory events ---*/

View File

@ -65,6 +65,7 @@ void h_replace_free ( ThreadId tid, void* p );
void h_replace___builtin_delete ( ThreadId tid, void* p );
void h_replace___builtin_vec_delete ( ThreadId tid, void* p );
void* h_replace_realloc ( ThreadId tid, void* p_old, SizeT new_size );
SizeT h_replace_malloc_usable_size ( ThreadId tid, void* p );
void h_new_mem_startup( Addr a, SizeT len,
Bool rr, Bool ww, Bool xx, ULong di_handle );

View File

@ -166,6 +166,7 @@ static void pc_pre_clo_init(void)
h_replace___builtin_delete,
h_replace___builtin_vec_delete,
h_replace_realloc,
h_replace_malloc_usable_size,
0 /* no need for client heap redzones */ );
VG_(needs_var_info) ();

View File

@ -3475,6 +3475,15 @@ static void* hg_cli__realloc ( ThreadId tid, void* payloadV, SizeT new_size )
}
}
static SizeT hg_cli_malloc_usable_size ( ThreadId tid, void* p )
{
MallocMeta *md = VG_(HT_lookup)( hg_mallocmeta_table, (UWord)p );
// There may be slop, but pretend there isn't because only the asked-for
// area will have been shadowed properly.
return ( md ? md->szB : 0 );
}
/*--------------------------------------------------------------*/
/*--- Instrumentation ---*/
@ -4222,6 +4231,7 @@ static void hg_pre_clo_init ( void )
hg_cli____builtin_delete,
hg_cli____builtin_vec_delete,
hg_cli__realloc,
hg_cli_malloc_usable_size,
HG_CLI__MALLOC_REDZONE_SZB );
/* 21 Dec 08: disabled this; it mostly causes H to start more

View File

@ -433,6 +433,7 @@ extern void VG_(needs_malloc_replacement)(
void (*p__builtin_delete) ( ThreadId tid, void* p ),
void (*p__builtin_vec_delete) ( ThreadId tid, void* p ),
void* (*prealloc) ( ThreadId tid, void* p, SizeT new_size ),
SizeT (*pmalloc_usable_size) ( ThreadId tid, void* p),
SizeT client_malloc_redzone_szB
);

View File

@ -1687,6 +1687,12 @@ static void* ms_realloc ( ThreadId tid, void* p_old, SizeT new_szB )
return renew_block(tid, p_old, new_szB);
}
static SizeT ms_malloc_usable_size ( ThreadId tid, void* p )
{
HP_Chunk* hc = VG_(HT_lookup)( malloc_list, (UWord)p );
return ( hc ? hc->req_szB + hc->slop_szB : 0 );
}
//------------------------------------------------------------//
//--- Stacks ---//
@ -2225,6 +2231,7 @@ static void ms_pre_clo_init(void)
ms___builtin_delete,
ms___builtin_vec_delete,
ms_realloc,
ms_malloc_usable_size,
0 );
// HP_Chunks

View File

@ -21,6 +21,7 @@ EXTRA_DIST = $(noinst_SCRIPTS) \
ignoring.post.exp ignoring.stderr.exp ignoring.vgtest \
long-names.post.exp long-names.stderr.exp long-names.vgtest \
long-time.post.exp long-time.stderr.exp long-time.vgtest \
malloc_usable.stderr.exp malloc_usable.vgtest \
new-cpp.post.exp new-cpp.stderr.exp new-cpp.vgtest \
no-stack-no-heap.post.exp no-stack-no-heap.stderr.exp no-stack-no-heap.vgtest \
null.post.exp null.stderr.exp null.vgtest \
@ -57,6 +58,7 @@ check_PROGRAMS = \
insig \
long-names \
long-time \
malloc_usable \
new-cpp \
null \
one \

View File

@ -0,0 +1,21 @@
#include <assert.h>
#include <malloc.h>
#include <stdlib.h>
#include <stdio.h>
int main(void)
{
// Because our allocations are in multiples of 8 or 16, 99 will round up
// to 104 or 112.
int* x = malloc(99);
// XXX: would be better to have a HAVE_MALLOC_USABLE_SIZE variable here
# if !defined(_AIX)
assert(104 == malloc_usable_size(x) ||
112 == malloc_usable_size(x));
assert( 0 == malloc_usable_size(NULL));
assert( 0 == malloc_usable_size((void*)0xdeadbeef));
# endif
return 0;
}

View File

View File

@ -0,0 +1,2 @@
prog: malloc_usable
vgopts: -q

View File

@ -121,6 +121,7 @@ void MC_(free) ( ThreadId tid, void* p );
void MC_(__builtin_delete) ( ThreadId tid, void* p );
void MC_(__builtin_vec_delete) ( ThreadId tid, void* p );
void* MC_(realloc) ( ThreadId tid, void* p, SizeT new_size );
SizeT MC_(malloc_usable_size) ( ThreadId tid, void* p );
/*------------------------------------------------------------*/

View File

@ -5711,6 +5711,7 @@ static void mc_pre_clo_init(void)
MC_(__builtin_delete),
MC_(__builtin_vec_delete),
MC_(realloc),
MC_(malloc_usable_size),
MC_MALLOC_REDZONE_SZB );
VG_(needs_xml_output) ();

View File

@ -483,6 +483,15 @@ void* MC_(realloc) ( ThreadId tid, void* p_old, SizeT new_szB )
return p_new;
}
SizeT MC_(malloc_usable_size) ( ThreadId tid, void* p )
{
MC_Chunk* mc = VG_(HT_lookup) ( MC_(malloc_list), (UWord)p );
// There may be slop, but pretend there isn't because only the asked-for
// area will be marked as addressable.
return ( mc ? mc->szB : 0 );
}
/* Memory pool stuff. */
void MC_(create_mempool)(Addr pool, UInt rzB, Bool is_zeroed)

View File

@ -5,10 +5,16 @@
int main(void)
{
// Since our allocations are in multiples of 8, 99 will round up to 104.
// Because Memcheck marks any slop as inaccessible, it doesn't round up
// sizes for malloc_usable_size().
int* x = malloc(99);
// DDD: would be better to have a HAVE_MALLOC_USABLE_SIZE variable here
# if !defined(_AIX)
assert(104 == malloc_usable_size(x));
assert(99 == malloc_usable_size(x));
assert( 0 == malloc_usable_size(NULL));
assert( 0 == malloc_usable_size((void*)0xdeadbeef));
# endif
return 0;
}