Bug 618790 handling of chunk in arena_run_alloc while loop is odd
author     timeless@mozdev.org
date       Mon, 28 Mar 2011 17:26:33 -0400
changeset  64118 3788ab7b5b4b1a5f14e371729a48057b054e049e
parent     64117 dd6d201316bb8dee95afc6cdd679a7bad7fd4d66
child      64119 be9814a5a456c9fec511adaee7720c781c263005
push id    unknown
push user  unknown
push date  unknown
bugs       618790
milestone  2.2a1pre
Bug 618790 handling of chunk in arena_run_alloc while loop is odd r=jasone
memory/jemalloc/jemalloc.c
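
The patch below flattens arena_run_alloc()'s while (true) loop, which could
only ever take a single extra pass, into straight-line code that tries three
cases in order: a best-fit run from the runs_avail tree, then the cached spare
chunk, then a freshly allocated chunk. The stand-alone sketch that follows is
a hypothetical illustration of that fallthrough shape, not the jemalloc code
itself: arena_t, chunk_t, and the plain list standing in for the runs_avail
red-black tree are invented stand-ins.

/*
 * Sketch of the new three-case fallthrough; types and helpers are
 * hypothetical stand-ins, not the jemalloc internals from the diff.
 */
#include <stddef.h>
#include <stdlib.h>

typedef struct chunk {
	struct chunk *next;
} chunk_t;

typedef struct {
	chunk_t *avail;	/* stand-in for the runs_avail best-fit tree */
	chunk_t *spare;	/* chunk cached by a previous dealloc */
} arena_t;

static chunk_t *
run_alloc(arena_t *arena)
{
	/* 1. Lowest best fit among the arena's existing runs. */
	if (arena->avail != NULL) {
		chunk_t *chunk = arena->avail;
		arena->avail = chunk->next;
		return (chunk);
	}

	/* 2. Otherwise reuse the spare chunk, if one is cached. */
	if (arena->spare != NULL) {
		chunk_t *chunk = arena->spare;
		arena->spare = NULL;
		return (chunk);
	}

	/* 3. No usable runs: create a new chunk, or fail. */
	return (calloc(1, sizeof(chunk_t)));
}

int
main(void)
{
	arena_t arena = { NULL, NULL };
	chunk_t *run = run_alloc(&arena);	/* takes the new-chunk path */

	free(run);
	return (0);
}

In the real function each of the three branches still calls arena_run_split()
before returning, and only the third case needs arena_chunk_init(), since that
chunk is brand new; the diff preserves that behavior while dropping the loop
and the chunk_dealloc() of a speculatively allocated chunk.
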
--- a/memory/jemalloc/jemalloc.c
+++ b/memory/jemalloc/jemalloc.c
@@ -3195,74 +3195,68 @@ arena_chunk_dealloc(arena_t *arena, aren
 
 	arena->spare = chunk;
 }
 
 static arena_run_t *
 arena_run_alloc(arena_t *arena, arena_bin_t *bin, size_t size, bool large,
     bool zero)
 {
-	arena_chunk_t *chunk;
 	arena_run_t *run;
 	arena_chunk_map_t *mapelm, key;
 
 	assert(size <= arena_maxclass);
 	assert((size & pagesize_mask) == 0);
 
-	chunk = NULL;
-	while (true) {
-		/* Search the arena's chunks for the lowest best fit. */
-		key.bits = size | CHUNK_MAP_KEY;
-		mapelm = arena_avail_tree_nsearch(&arena->runs_avail, &key);
-		if (mapelm != NULL) {
-			arena_chunk_t *run_chunk =
-			    (arena_chunk_t*)CHUNK_ADDR2BASE(mapelm);
-			size_t pageind = ((uintptr_t)mapelm -
-			    (uintptr_t)run_chunk->map) /
-			    sizeof(arena_chunk_map_t);
-
-			if (chunk != NULL)
-				chunk_dealloc(chunk, chunksize);
-			run = (arena_run_t *)((uintptr_t)run_chunk + (pageind
-			    << pagesize_2pow));
-			arena_run_split(arena, run, size, large, zero);
-			return (run);
-		}
-
-		if (arena->spare != NULL) {
-			/* Use the spare. */
-			chunk = arena->spare;
-			arena->spare = NULL;
-			run = (arena_run_t *)((uintptr_t)chunk +
-			    (arena_chunk_header_npages << pagesize_2pow));
-			/* Insert the run into the runs_avail tree. */
-			arena_avail_tree_insert(&arena->runs_avail,
-			    &chunk->map[arena_chunk_header_npages]);
-			arena_run_split(arena, run, size, large, zero);
-			return (run);
-		}
-
-		/*
-		 * No usable runs.  Create a new chunk from which to allocate
-		 * the run.
-		 */
-		if (chunk == NULL) {
-			chunk = (arena_chunk_t *)chunk_alloc(chunksize, true,
-			    true);
-			if (chunk == NULL)
-				return (NULL);
-		}
+	/* Search the arena's chunks for the lowest best fit. */
+	key.bits = size | CHUNK_MAP_KEY;
+	mapelm = arena_avail_tree_nsearch(&arena->runs_avail, &key);
+	if (mapelm != NULL) {
+		arena_chunk_t *chunk =
+		    (arena_chunk_t*)CHUNK_ADDR2BASE(mapelm);
+		size_t pageind = ((uintptr_t)mapelm -
+		    (uintptr_t)chunk->map) /
+		    sizeof(arena_chunk_map_t);
+
+		run = (arena_run_t *)((uintptr_t)chunk + (pageind
+		    << pagesize_2pow));
+		arena_run_split(arena, run, size, large, zero);
+		return (run);
+	}
+
+	if (arena->spare != NULL) {
+		/* Use the spare. */
+		arena_chunk_t *chunk = arena->spare;
+		arena->spare = NULL;
+		run = (arena_run_t *)((uintptr_t)chunk +
+		    (arena_chunk_header_npages << pagesize_2pow));
+		/* Insert the run into the runs_avail tree. */
+		arena_avail_tree_insert(&arena->runs_avail,
+		    &chunk->map[arena_chunk_header_npages]);
+		arena_run_split(arena, run, size, large, zero);
+		return (run);
+	}
+
+	/*
+	 * No usable runs.  Create a new chunk from which to allocate
+	 * the run.
+	 */
+	{
+		arena_chunk_t *chunk = (arena_chunk_t *)
+		    chunk_alloc(chunksize, true, true);
+		if (chunk == NULL)
+			return (NULL);
 
 		arena_chunk_init(arena, chunk);
 		run = (arena_run_t *)((uintptr_t)chunk +
 		    (arena_chunk_header_npages << pagesize_2pow));
-		/* Update page map. */
-		arena_run_split(arena, run, size, large, zero);
-		return (run);
 	}
+	/* Update page map. */
+	arena_run_split(arena, run, size, large, zero);
+	return (run);
 }
 
 static void
 arena_purge(arena_t *arena)
 {
 	arena_chunk_t *chunk;
 	size_t i, npages;
 #ifdef MALLOC_DEBUG