Fix a race condition in arena_ralloc() for shrinking in-place large
reallocation, when junk filling is enabled.  Junk filling must occur
prior to shrinking, since any deallocated trailing pages are immediately
available for use by other threads.

Reported by:	Mats Palmgren <mats.palmgren@bredband.net>
commit 1945c7bd47
parent 196d0d4b59
Author:	Jason Evans
Date:	2008-02-17 18:34:17 +00:00

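To see why the ordering matters, consider a minimal sketch of shrinking a large allocation in place. This is not the allocator's code: release_trailing_pages() is a hypothetical stand-in for arena_ralloc_large_shrink() in the diff below, and locking and arena bookkeeping are omitted. Once the trailing pages are handed back to the arena, another thread may allocate them immediately, so the 0x5a junk fill has to happen before that hand-back.

/*
 * Illustrative sketch only (not the allocator's code): shrink a large
 * region in place, junk-filling the tail *before* the trailing pages
 * can become visible to other threads.  release_trailing_pages() is a
 * hypothetical stand-in for arena_ralloc_large_shrink().
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define SKETCH_PAGE_SIZE	((size_t)4096)
#define SKETCH_PAGE_CEILING(s)						\
	(((s) + SKETCH_PAGE_SIZE - 1) & ~(SKETCH_PAGE_SIZE - 1))

static bool opt_junk = true;	/* analogous to the allocator's opt_junk */

static void
release_trailing_pages(void *ptr, size_t psize, size_t oldsize)
{
	/*
	 * In the real allocator this hands [psize, oldsize) back to the
	 * arena, after which other threads may allocate those pages.
	 */
	(void)ptr;
	(void)psize;
	(void)oldsize;
}

static void
shrink_large_in_place(void *ptr, size_t size, size_t oldsize)
{
	size_t psize = SKETCH_PAGE_CEILING(size);

	if (opt_junk) {
		/* Fill while the tail is still exclusively ours. */
		memset((void *)((uintptr_t)ptr + size), 0x5a, oldsize - size);
	}
	/* Only now give the trailing pages back to the arena. */
	release_trailing_pages(ptr, psize, oldsize);
}

With the pre-change ordering (shrink first, then fill), the memset could scribble over pages that another thread had already obtained from the arena, which is the race the commit message describes.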

@@ -894,11 +894,11 @@ static void *arena_palloc(arena_t *arena, size_t alignment, size_t size,
 static size_t arena_salloc(const void *ptr);
 static void arena_dalloc_large(arena_t *arena, arena_chunk_t *chunk,
     void *ptr);
-static void arena_ralloc_resize_shrink(arena_t *arena, arena_chunk_t *chunk,
+static void arena_ralloc_large_shrink(arena_t *arena, arena_chunk_t *chunk,
     void *ptr, size_t size, size_t oldsize);
-static bool arena_ralloc_resize_grow(arena_t *arena, arena_chunk_t *chunk,
+static bool arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk,
     void *ptr, size_t size, size_t oldsize);
-static bool arena_ralloc_resize(void *ptr, size_t size, size_t oldsize);
+static bool arena_ralloc_large(void *ptr, size_t size, size_t oldsize);
 static void *arena_ralloc(void *ptr, size_t size, size_t oldsize);
 static bool arena_new(arena_t *arena);
 static arena_t *arenas_extend(unsigned ind);
@@ -3345,7 +3345,7 @@ idalloc(void *ptr)
 }
 
 static void
-arena_ralloc_resize_shrink(arena_t *arena, arena_chunk_t *chunk, void *ptr,
+arena_ralloc_large_shrink(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     size_t size, size_t oldsize)
 {
 	extent_node_t *node, key;
@@ -3373,7 +3373,7 @@ arena_ralloc_resize_shrink(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 }
 
 static bool
-arena_ralloc_resize_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
+arena_ralloc_large_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     size_t size, size_t oldsize)
 {
 	extent_node_t *nodeC, key;
@@ -3431,21 +3431,44 @@ arena_ralloc_resize_grow(arena_t *arena, arena_chunk_t *chunk, void *ptr,
  * always fail if growing an object, and the following run is already in use.
  */
 static bool
-arena_ralloc_resize(void *ptr, size_t size, size_t oldsize)
+arena_ralloc_large(void *ptr, size_t size, size_t oldsize)
 {
-	arena_chunk_t *chunk;
-	arena_t *arena;
+	size_t psize;
 
-	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-	arena = chunk->arena;
-	assert(arena->magic == ARENA_MAGIC);
-
-	if (size < oldsize) {
-		arena_ralloc_resize_shrink(arena, chunk, ptr, size, oldsize);
+	psize = PAGE_CEILING(size);
+	if (psize == oldsize) {
+		/* Same size class. */
+		if (opt_junk && size < oldsize) {
+			memset((void *)((uintptr_t)ptr + size), 0x5a, oldsize -
+			    size);
+		}
 		return (false);
 	} else {
-		return (arena_ralloc_resize_grow(arena, chunk, ptr, size,
-		    oldsize));
+		arena_chunk_t *chunk;
+		arena_t *arena;
+
+		chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+		arena = chunk->arena;
+		assert(arena->magic == ARENA_MAGIC);
+
+		if (psize < oldsize) {
+			/* Fill before shrinking in order to avoid a race. */
+			if (opt_junk) {
+				memset((void *)((uintptr_t)ptr + size), 0x5a,
+				    oldsize - size);
+			}
+			arena_ralloc_large_shrink(arena, chunk, ptr, psize,
+			    oldsize);
+			return (false);
+		} else {
+			bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
+			    psize, oldsize);
+			if (ret == false && opt_zero) {
+				memset((void *)((uintptr_t)ptr + oldsize), 0,
+				    size - oldsize);
+			}
+			return (ret);
+		}
 	}
 }
@@ -3471,16 +3494,9 @@ arena_ralloc(void *ptr, size_t size, size_t oldsize)
 		    pow2_ceil(size) == pow2_ceil(oldsize))
 			goto IN_PLACE; /* Same size class. */
 	} else if (oldsize > bin_maxclass && oldsize <= arena_maxclass) {
-		size_t psize;
-
 		assert(size > bin_maxclass);
-		psize = PAGE_CEILING(size);
-
-		if (psize == oldsize)
-			goto IN_PLACE; /* Same size class. */
-
-		if (arena_ralloc_resize(ptr, psize, oldsize) == false)
-			goto IN_PLACE;
+		if (arena_ralloc_large(ptr, size, oldsize) == false)
+			return (ptr);
 	}
 
 	/*